Bug 1423241 - Move MediaManager from a stream listener to track listeners. r=padenot
author Andreas Pehrson <apehrson@mozilla.com>
Fri, 23 Nov 2018 15:00:20 +0000
changeset 504257 12f6bb0888a2c9359758be3f62e06377c0ad1333
parent 504256 f7fc271746a2e216d8a6dfbb97d256bf0e156f0b
child 504258 befba547fb5850fd62d4e31784aa4b5198404500
push id 10290
push user ffxbld-merge
push date Mon, 03 Dec 2018 16:23:23 +0000
treeherder mozilla-beta@700bed2445e6 [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers padenot
bugs 1423241
milestone 65.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1423241 - Move MediaManager from a stream listener to track listeners. r=padenot Differential Revision: https://phabricator.services.mozilla.com/D12268
dom/media/MediaManager.cpp
dom/media/MediaManager.h
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineDefault.h
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.h
dom/media/webrtc/MediaEngineSource.h
dom/media/webrtc/MediaEngineTabVideoSource.cpp
dom/media/webrtc/MediaEngineTabVideoSource.h
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
dom/media/webrtc/MediaEngineWebRTCAudio.h
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -218,19 +218,24 @@ using dom::Promise;
 using dom::Sequence;
 using media::NewRunnableFrom;
 using media::NewTaskFrom;
 using media::Pledge;
 using media::Refcountable;
 
 static Atomic<bool> sHasShutdown;
 
+class SourceTrackListener;
+
 struct DeviceState {
-  DeviceState(const RefPtr<MediaDevice>& aDevice, bool aOffWhileDisabled)
-      : mOffWhileDisabled(aOffWhileDisabled), mDevice(aDevice) {
+  DeviceState(const RefPtr<MediaDevice>& aDevice, bool aOffWhileDisabled,
+              RefPtr<SourceTrackListener> aListener)
+      : mOffWhileDisabled(aOffWhileDisabled),
+        mDevice(aDevice),
+        mListener(std::move(aListener)) {
     MOZ_ASSERT(mDevice);
   }
 
   // true if we have stopped mDevice, this is a terminal state.
   // MainThread only.
   bool mStopped = false;
 
   // true if mDevice is currently enabled, i.e., turned on and capturing.
@@ -259,16 +264,20 @@ struct DeviceState {
   // disabled. When the timer fires we initiate Stop()ing mDevice.
   // If set we allow dynamically stopping and starting mDevice.
   // Any thread.
   const RefPtr<MediaTimer> mDisableTimer = new MediaTimer();
 
   // The underlying device we keep state for. Always non-null.
   // Threadsafe access, but see method declarations for individual constraints.
   const RefPtr<MediaDevice> mDevice;
+
+  // The track listener for the track hooked up to mDevice.
+  // Main thread only.
+  RefPtr<SourceTrackListener> mListener;
 };
 
 /**
  * This mimics the capture state from nsIMediaManagerService.
  */
 enum class CaptureState : uint16_t {
   Off = nsIMediaManagerService::STATE_NOCAPTURE,
   Enabled = nsIMediaManagerService::STATE_CAPTURE_ENABLED,
@@ -419,29 +428,24 @@ class SourceListener : public SupportsWe
   MediaDevice* GetVideoDevice() const {
     return mVideoDeviceState ? mVideoDeviceState->mDevice.get() : nullptr;
   }
 
   /**
    * Called on MediaStreamGraph thread when MSG asks us for more data from
    * input devices.
    */
-  void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
+  void Pull(TrackID aTrackID, StreamTime aEndOfAppendedData,
+            StreamTime aDesiredTime);
 
   /**
-   * Called on main thread after MediaStreamGraph notifies us that our
-   * MediaStream was marked finish in the graph.
+   * Called on main thread after MediaStreamGraph notifies us that one of our
+   * track listeners was removed as listener from its track in the graph.
    */
-  void NotifyFinished();
-
-  /**
-   * Called on main thread after MediaStreamGraph notifies us that we
-   * were removed as listener from the MediaStream in the graph.
-   */
-  void NotifyRemoved();
+  void NotifyRemoved(TrackID aTrackID);
 
   bool Activated() const { return mStream; }
 
   bool Stopped() const { return mStopped; }
 
   bool CapturingVideo() const;
 
   bool CapturingAudio() const;
@@ -451,80 +455,16 @@ class SourceListener : public SupportsWe
   RefPtr<ApplyConstraintsPromise> ApplyConstraintsToTrack(
       nsPIDOMWindowInner* aWindow, TrackID aTrackID,
       const dom::MediaTrackConstraints& aConstraints,
       dom::CallerType aCallerType);
 
   PrincipalHandle GetPrincipalHandle() const;
 
  private:
-  /**
-   * Wrapper class for the MediaStreamListener part of SourceListener.
-   *
-   * This is required since MediaStreamListener and SupportsWeakPtr
-   * both implement refcounting.
-   */
-  class SourceStreamListener : public MediaStreamListener {
-   public:
-    explicit SourceStreamListener(SourceListener* aSourceListener)
-        : mSourceListener(aSourceListener) {}
-
-    void NotifyPull(MediaStreamGraph* aGraph,
-                    StreamTime aDesiredTime) override {
-      mSourceListener->NotifyPull(aGraph, aDesiredTime);
-    }
-
-    void NotifyEvent(MediaStreamGraph* aGraph,
-                     MediaStreamGraphEvent aEvent) override {
-      nsCOMPtr<nsIEventTarget> target;
-
-      switch (aEvent) {
-        case MediaStreamGraphEvent::EVENT_FINISHED:
-          target = GetMainThreadEventTarget();
-          if (NS_WARN_IF(!target)) {
-            NS_ASSERTION(false,
-                         "Mainthread not available; running on current thread");
-            // Ensure this really *was* MainThread (NS_GetCurrentThread won't
-            // work)
-            MOZ_RELEASE_ASSERT(mSourceListener->mMainThreadCheck ==
-                               GetCurrentVirtualThread());
-            mSourceListener->NotifyFinished();
-            return;
-          }
-          target->Dispatch(NewRunnableMethod("SourceListener::NotifyFinished",
-                                             mSourceListener,
-                                             &SourceListener::NotifyFinished),
-                           NS_DISPATCH_NORMAL);
-          break;
-        case MediaStreamGraphEvent::EVENT_REMOVED:
-          target = GetMainThreadEventTarget();
-          if (NS_WARN_IF(!target)) {
-            NS_ASSERTION(false,
-                         "Mainthread not available; running on current thread");
-            // Ensure this really *was* MainThread (NS_GetCurrentThread won't
-            // work)
-            MOZ_RELEASE_ASSERT(mSourceListener->mMainThreadCheck ==
-                               GetCurrentVirtualThread());
-            mSourceListener->NotifyRemoved();
-            return;
-          }
-          target->Dispatch(NewRunnableMethod("SourceListener::NotifyRemoved",
-                                             mSourceListener,
-                                             &SourceListener::NotifyRemoved),
-                           NS_DISPATCH_NORMAL);
-          break;
-        default:
-          break;
-      }
-    }
-
-   private:
-    RefPtr<SourceListener> mSourceListener;
-  };
-
   virtual ~SourceListener() = default;
 
   /**
    * Returns a pointer to the device state for aTrackID.
    *
    * This is intended for internal use where we need to figure out which state
    * corresponds to aTrackID, not for availability checks. As such, we assert
    * that the device does indeed exist.
@@ -532,43 +472,80 @@ class SourceListener : public SupportsWe
    * Since this is a raw pointer and the state lifetime depends on the
    * SourceListener's lifetime, it's internal use only.
    */
   DeviceState& GetDeviceStateFor(TrackID aTrackID) const;
 
   // true after this listener has had all devices stopped. MainThread only.
   bool mStopped;
 
-  // true after the stream this listener is listening to has finished in the
-  // MediaStreamGraph. MainThread only.
-  bool mFinished;
-
   // true after this listener has been removed from its MediaStream.
   // MainThread only.
   bool mRemoved;
 
   // never ever indirect off this; just for assertions
   PRThread* mMainThreadCheck;
 
+  // For access to mMainThreadCheck
+  friend class SourceTrackListener;
+
   // Set in Register() on main thread, then read from any thread.
   PrincipalHandle mPrincipalHandle;
 
   // Weak pointer to the window listener that owns us. MainThread only.
   GetUserMediaWindowListener* mWindowListener;
 
   // Accessed from MediaStreamGraph thread, MediaManager thread, and MainThread
   // No locking needed as they're set on Activate() and never assigned to again.
   UniquePtr<DeviceState> mAudioDeviceState;
   UniquePtr<DeviceState> mVideoDeviceState;
-  RefPtr<SourceMediaStream> mStream;             // threadsafe refcnt
-  RefPtr<SourceStreamListener> mStreamListener;  // threadsafe refcnt
+  RefPtr<SourceMediaStream> mStream;  // threadsafe refcnt
 };
 
 /**
- * This class represents a WindowID and handles all MediaStreamListeners
+ * Wrapper class for the MediaStreamTrackListener part of SourceListener.
+ *
+ * This is required since MediaStreamTrackListener and SupportsWeakPtr
+ * both implement refcounting.
+ */
+class SourceTrackListener : public MediaStreamTrackListener {
+ public:
+  SourceTrackListener(SourceListener* aSourceListener, TrackID aTrackID)
+      : mSourceListener(aSourceListener), mTrackID(aTrackID) {}
+
+  void NotifyPull(MediaStreamGraph* aGraph, StreamTime aEndOfAppendedData,
+                  StreamTime aDesiredTime) override {
+    mSourceListener->Pull(mTrackID, aEndOfAppendedData, aDesiredTime);
+  }
+
+  void NotifyEnded() override { NotifyRemoved(); }
+
+  void NotifyRemoved() override {
+    nsCOMPtr<nsIEventTarget> target = GetMainThreadEventTarget();
+    if (NS_WARN_IF(!target)) {
+      NS_ASSERTION(false,
+                   "Mainthread not available; running on current thread");
+      // Ensure this really *was* MainThread (NS_GetCurrentThread won't work)
+      MOZ_RELEASE_ASSERT(mSourceListener->mMainThreadCheck ==
+                         GetCurrentVirtualThread());
+      mSourceListener->NotifyRemoved(mTrackID);
+      return;
+    }
+    target->Dispatch(NewRunnableMethod<TrackID>(
+        "SourceListener::NotifyRemoved", mSourceListener,
+        &SourceListener::NotifyRemoved, mTrackID));
+  }
+
+ private:
+  const RefPtr<SourceListener> mSourceListener;
+  const TrackID mTrackID;
+};
+
+/**
+ * This class represents a WindowID and handles all MediaStreamTrackListeners
  * (here subclassed as SourceListeners) used to feed GetUserMedia source
  * streams. It proxies feedback from them into messages for browser chrome.
  * The SourceListeners are used to Start() and Stop() the underlying
  * MediaEngineSource when MediaStreams are assigned and deassigned in content.
  */
 class GetUserMediaWindowListener {
   friend MediaManager;
 
@@ -1100,23 +1077,25 @@ nsresult MediaDevice::Stop() {
 
 nsresult MediaDevice::Deallocate() {
   MOZ_ASSERT(MediaManager::IsInMediaThread());
   MOZ_ASSERT(mSource);
   return mSource->Deallocate(mAllocationHandle);
 }
 
 void MediaDevice::Pull(const RefPtr<SourceMediaStream>& aStream,
-                       TrackID aTrackID, StreamTime aDesiredTime,
+                       TrackID aTrackID, StreamTime aEndOfAppendedData,
+                       StreamTime aDesiredTime,
                        const PrincipalHandle& aPrincipal) {
   // This is on the graph thread, but mAllocationHandle is safe since we never
   // change it after it's been set, which is guaranteed to happen before
   // registering the listener for pulls.
   MOZ_ASSERT(mSource);
-  mSource->Pull(mAllocationHandle, aStream, aTrackID, aDesiredTime, aPrincipal);
+  mSource->Pull(mAllocationHandle, aStream, aTrackID, aEndOfAppendedData,
+                aDesiredTime, aPrincipal);
 }
 
 dom::MediaSourceEnum MediaDevice::GetMediaSource() const {
   // Threadsafe because mSource is const. GetMediaSource() might have other
   // requirements.
   MOZ_ASSERT(mSource);
   return mSource->GetMediaSource();
 }
@@ -4117,17 +4096,16 @@ bool MediaManager::IsActivelyCapturingOr
     }
   }
   return audio == nsIPermissionManager::ALLOW_ACTION ||
          video == nsIPermissionManager::ALLOW_ACTION;
 }
 
 SourceListener::SourceListener()
     : mStopped(false),
-      mFinished(false),
       mRemoved(false),
       mMainThreadCheck(nullptr),
       mPrincipalHandle(PRINCIPAL_HANDLE_NONE),
       mWindowListener(nullptr) {}
 
 void SourceListener::Register(GetUserMediaWindowListener* aListener) {
   LOG(("SourceListener %p registering with window listener %p", this,
        aListener));
@@ -4148,35 +4126,36 @@ void SourceListener::Activate(SourceMedi
   LOG(("SourceListener %p activating audio=%p video=%p", this, aAudioDevice,
        aVideoDevice));
 
   MOZ_ASSERT(!mStopped, "Cannot activate stopped source listener");
   MOZ_ASSERT(!Activated(), "Already activated");
 
   mMainThreadCheck = GetCurrentVirtualThread();
   mStream = aStream;
-  mStreamListener = new SourceStreamListener(this);
   if (aAudioDevice) {
     mAudioDeviceState = MakeUnique<DeviceState>(
         aAudioDevice,
         aAudioDevice->GetMediaSource() == dom::MediaSourceEnum::Microphone &&
             Preferences::GetBool(
                 "media.getusermedia.microphone.off_while_disabled.enabled",
-                true));
+                true),
+        MakeRefPtr<SourceTrackListener>(this, kAudioTrack));
+    mStream->AddTrackListener(mAudioDeviceState->mListener, kAudioTrack);
   }
 
   if (aVideoDevice) {
     mVideoDeviceState = MakeUnique<DeviceState>(
         aVideoDevice,
         aVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Camera &&
             Preferences::GetBool(
-                "media.getusermedia.camera.off_while_disabled.enabled", true));
+                "media.getusermedia.camera.off_while_disabled.enabled", true),
+        MakeRefPtr<SourceTrackListener>(this, kVideoTrack));
+    mStream->AddTrackListener(mVideoDeviceState->mListener, kVideoTrack);
   }
-
-  mStream->AddListener(mStreamListener);
 }
 
 RefPtr<SourceListener::InitPromise> SourceListener::InitializeAsync() {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
   MOZ_DIAGNOSTIC_ASSERT(!mStopped);
 
   RefPtr<InitPromise> init = MediaManager::PostTask<InitPromise>(
       __func__,
@@ -4299,52 +4278,57 @@ void SourceListener::Stop() {
   MOZ_ASSERT(mStream, "Can't end tracks. No source stream.");
 
   if (mAudioDeviceState && !mAudioDeviceState->mStopped) {
     StopTrack(kAudioTrack);
   }
   if (mVideoDeviceState && !mVideoDeviceState->mStopped) {
     StopTrack(kVideoTrack);
   }
-
-  MediaManager::PostTask(NewTaskFrom([source = mStream]() {
-    MOZ_ASSERT(MediaManager::IsInMediaThread());
-    source->EndAllTrackAndFinish();
-  }));
 }
 
 void SourceListener::Remove() {
   MOZ_ASSERT(NS_IsMainThread());
 
   if (mAudioDeviceState) {
     mAudioDeviceState->mDisableTimer->Cancel();
   }
   if (mVideoDeviceState) {
     mVideoDeviceState->mDisableTimer->Cancel();
   }
 
   if (!mStream || mRemoved) {
     return;
   }
 
-  LOG(("SourceListener %p removed on purpose, mFinished = %d", this,
-       (int)mFinished));
+  LOG(("SourceListener %p removed on purpose", this));
   mRemoved = true;  // RemoveListener is async, avoid races
   mWindowListener = nullptr;
 
   // If it's destroyed, don't call - listener will be removed and we'll be
   // notified!
   if (!mStream->IsDestroyed()) {
     // We disable pulling before removing so we don't risk having live tracks
     // without a listener attached - that wouldn't produce data and would be
     // illegal to the graph.
     mStream->SetPullEnabled(false);
-    mStream->RemoveListener(mStreamListener);
+    if (mAudioDeviceState) {
+      mStream->RemoveTrackListener(mAudioDeviceState->mListener, kAudioTrack);
+    }
+    if (mVideoDeviceState) {
+      mStream->RemoveTrackListener(mVideoDeviceState->mListener, kVideoTrack);
+    }
   }
-  mStreamListener = nullptr;
+
+  if (mAudioDeviceState) {
+    mAudioDeviceState->mListener = nullptr;
+  }
+  if (mVideoDeviceState) {
+    mVideoDeviceState->mListener = nullptr;
+  }
 }
 
 void SourceListener::StopTrack(TrackID aTrackID) {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
   MOZ_ASSERT(Activated(), "No device to stop");
   MOZ_ASSERT(aTrackID == kAudioTrack || aTrackID == kVideoTrack,
              "Unknown track id");
   DeviceState& state = GetDeviceStateFor(aTrackID);
@@ -4587,54 +4571,45 @@ void SourceListener::StopSharing() {
   }
 }
 
 SourceMediaStream* SourceListener::GetSourceStream() {
   NS_ASSERTION(mStream, "Getting stream from never-activated SourceListener");
   return mStream;
 }
 
-// Proxy NotifyPull() to sources
-void SourceListener::NotifyPull(MediaStreamGraph* aGraph,
-                                StreamTime aDesiredTime) {
-  if (mAudioDeviceState) {
-    mAudioDeviceState->mDevice->Pull(mStream, kAudioTrack, aDesiredTime,
-                                     mPrincipalHandle);
-  }
-  if (mVideoDeviceState) {
-    mVideoDeviceState->mDevice->Pull(mStream, kVideoTrack, aDesiredTime,
-                                     mPrincipalHandle);
-  }
+// Proxy Pull() to the right source
+void SourceListener::Pull(TrackID aTrackID, StreamTime aEndOfAppendedData,
+                          StreamTime aDesiredTime) {
+  DeviceState& state = GetDeviceStateFor(aTrackID);
+  state.mDevice->Pull(mStream, aTrackID, aEndOfAppendedData, aDesiredTime,
+                      mPrincipalHandle);
 }
 
-void SourceListener::NotifyFinished() {
+void SourceListener::NotifyRemoved(TrackID aTrackID) {
   MOZ_ASSERT(NS_IsMainThread());
-  mFinished = true;
-  if (!mWindowListener) {
-    // Removed explicitly before finished.
+  LOG(("Track %d for SourceListener %p removed", aTrackID, this));
+
+  StopTrack(aTrackID);
+
+  if (!mStopped) {
+    // There are more live tracks that need to be stopped before removal.
     return;
   }
 
-  LOG(("SourceListener %p NotifyFinished", this));
-
-  Stop();  // we know it's been activated
+  if (!mWindowListener) {
+    // Removed explicitly before MSG's notification.
+    return;
+  }
+
   mWindowListener->Remove(this);
-}
-
-void SourceListener::NotifyRemoved() {
-  MOZ_ASSERT(NS_IsMainThread());
-  LOG(("SourceListener removed, mFinished = %d", (int)mFinished));
-  mRemoved = true;
-
-  if (Activated() && !mFinished) {
-    NotifyFinished();
-  }
-
-  mWindowListener = nullptr;
-  mStreamListener = nullptr;
+
+  MOZ_ASSERT(!mWindowListener);
+  MOZ_ASSERT_IF(mAudioDeviceState, !mAudioDeviceState->mListener);
+  MOZ_ASSERT_IF(mVideoDeviceState, !mVideoDeviceState->mListener);
 }
 
 bool SourceListener::CapturingVideo() const {
   MOZ_ASSERT(NS_IsMainThread());
   return Activated() && mVideoDeviceState && !mVideoDeviceState->mStopped &&
          (!mVideoDeviceState->mDevice->mSource->IsFake() ||
           Preferences::GetBool("media.navigator.permission.fake"));
 }
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -95,17 +95,18 @@ class MediaDevice : public nsIMediaDevic
   nsresult Reconfigure(const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const char** aOutBadConstraint);
   nsresult FocusOnSelectedSource();
   nsresult Stop();
   nsresult Deallocate();
 
   void Pull(const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aDesiredTime, const PrincipalHandle& aPrincipal);
+            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipal);
 
   void GetSettings(dom::MediaTrackSettings& aOutSettings) const;
 
   dom::MediaSourceEnum GetMediaSource() const;
 
  protected:
   virtual ~MediaDevice() = default;
 
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -305,17 +305,18 @@ void MediaEngineDefaultVideoSource::Gene
 
   MutexAutoLock lock(mMutex);
   mImage = std::move(ycbcr_image);
 }
 
 void MediaEngineDefaultVideoSource::Pull(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-    StreamTime aDesiredTime, const PrincipalHandle& aPrincipalHandle) {
+    StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
+    const PrincipalHandle& aPrincipalHandle) {
   TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i", aStream.get(),
                                aTrackID);
   // AppendFrame takes ownership of `segment`
   VideoSegment segment;
 
   RefPtr<layers::Image> image;
   {
     MutexAutoLock lock(mMutex);
@@ -329,25 +330,25 @@ void MediaEngineDefaultVideoSource::Pull
     MOZ_ASSERT(mState != kAllocated);
     if (mState == kStarted) {
       MOZ_ASSERT(mStream == aStream);
       MOZ_ASSERT(mTrackID == aTrackID);
       image = mImage;
     }
   }
 
-  StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
-  if (delta > 0) {
-    // nullptr images are allowed
-    IntSize size(mOpts.mWidth, mOpts.mHeight);
-    segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
-    // This can fail if either a) we haven't added the track yet, or b)
-    // we've removed or finished the track.
-    aStream->AppendToTrack(aTrackID, &segment);
-  }
+  StreamTime delta = aDesiredTime - aEndOfAppendedData;
+  MOZ_ASSERT(delta > 0);
+
+  // nullptr images are allowed
+  IntSize size(mOpts.mWidth, mOpts.mHeight);
+  segment.AppendFrame(image.forget(), delta, size, aPrincipalHandle);
+  // This can fail if either a) we haven't added the track yet, or b)
+  // we've removed or finished the track.
+  aStream->AppendToTrack(aTrackID, &segment);
 }
 
 /**
  * Default audio source.
  */
 
 MediaEngineDefaultAudioSource::MediaEngineDefaultAudioSource()
     : mMutex("MediaEngineDefaultAudioSource::mMutex") {}
@@ -494,17 +495,18 @@ void MediaEngineDefaultAudioSource::Appe
   AutoTArray<const int16_t*, 1> channels;
   channels.AppendElement(dest);
   aSegment.AppendFrames(buffer.forget(), channels, aSamples, aPrincipalHandle);
 }
 
 void MediaEngineDefaultAudioSource::Pull(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-    StreamTime aDesiredTime, const PrincipalHandle& aPrincipalHandle) {
+    StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
+    const PrincipalHandle& aPrincipalHandle) {
   TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i", aStream.get(),
                                aTrackID);
   AudioSegment segment;
   // avoid accumulating rounding errors
   TrackTicks desired =
       aStream->TimeToTicksRoundUp(aStream->GraphRate(), aDesiredTime);
   TrackTicks delta = desired - mLastNotify;
   mLastNotify += delta;
--- a/dom/media/webrtc/MediaEngineDefault.h
+++ b/dom/media/webrtc/MediaEngineDefault.h
@@ -55,17 +55,17 @@ class MediaEngineDefaultVideoSource : pu
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
   nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
   void Pull(const RefPtr<const AllocationHandle>& aHandle,
             const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aDesiredTime,
+            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
             const PrincipalHandle& aPrincipalHandle) override;
 
   uint32_t GetBestFitnessDistance(
       const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
       const nsString& aDeviceId) const override;
 
   bool IsFake() const override { return true; }
 
@@ -124,17 +124,17 @@ class MediaEngineDefaultAudioSource : pu
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
   nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
   void inline AppendToSegment(AudioSegment& aSegment, TrackTicks aSamples,
                               const PrincipalHandle& aPrincipalHandle);
   void Pull(const RefPtr<const AllocationHandle>& aHandle,
             const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aDesiredTime,
+            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
             const PrincipalHandle& aPrincipalHandle) override;
 
   bool IsFake() const override { return true; }
 
   dom::MediaSourceEnum GetMediaSource() const override {
     return dom::MediaSourceEnum::Microphone;
   }
 
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -457,31 +457,30 @@ webrtc::CaptureCapability MediaEngineRem
   camera::GetChildAndCall(&camera::CamerasChild::GetCaptureCapability,
                           mCapEngine, mUniqueId.get(), aIndex, result);
   return result;
 }
 
 void MediaEngineRemoteVideoSource::Pull(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-    StreamTime aDesiredTime, const PrincipalHandle& aPrincipalHandle) {
+    StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
+    const PrincipalHandle& aPrincipalHandle) {
   TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i", aStream.get(),
                                aTrackID);
   MutexAutoLock lock(mMutex);
   if (mState == kReleased) {
     // We end the track before deallocating, so this is safe.
     return;
   }
 
   MOZ_ASSERT(mState == kStarted || mState == kStopped);
 
-  StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
-  if (delta <= 0) {
-    return;
-  }
+  StreamTime delta = aDesiredTime - aEndOfAppendedData;
+  MOZ_ASSERT(delta > 0);
 
   VideoSegment segment;
   RefPtr<layers::Image> image = mImage;
   if (mState == kStarted) {
     MOZ_ASSERT(!image || mImageSize == image->GetSize());
     segment.AppendFrame(image.forget(), delta, mImageSize, aPrincipalHandle);
   } else {
     // nullptr images are allowed, but we force it to black and retain the size.
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.h
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.h
@@ -133,17 +133,17 @@ class MediaEngineRemoteVideoSource : pub
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
   nsresult FocusOnSelectedSource(
       const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
   void Pull(const RefPtr<const AllocationHandle>& aHandle,
             const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aDesiredTime,
+            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
             const PrincipalHandle& aPrincipalHandle) override;
 
   void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;
 
   void Refresh(int aIndex);
 
   void Shutdown() override;
 
--- a/dom/media/webrtc/MediaEngineSource.h
+++ b/dom/media/webrtc/MediaEngineSource.h
@@ -243,21 +243,21 @@ class MediaEngineSourceInterface {
    * GetSettings() will return the scaled resolution. I.e., the
    * device settings as seen by js.
    */
   virtual void GetSettings(dom::MediaTrackSettings& aOutSettings) const = 0;
 
   /**
    * Pulls data from the MediaEngineSource into the track.
    *
-   * Driven by MediaStreamListener::NotifyPull.
+   * Driven by MediaStreamTrackListener::NotifyPull.
    */
   virtual void Pull(const RefPtr<const AllocationHandle>& aHandle,
                     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-                    StreamTime aDesiredTime,
+                    StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
                     const PrincipalHandle& aPrincipalHandle) = 0;
 };
 
 /**
  * Abstract base class for MediaEngineSources.
  *
  * Implements defaults for some common MediaEngineSourceInterface methods below.
  * Also implements RefPtr support and an owning-thread model for thread safety
--- a/dom/media/webrtc/MediaEngineTabVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.cpp
@@ -229,17 +229,18 @@ nsresult MediaEngineTabVideoSource::Star
   }
 
   return NS_OK;
 }
 
 void MediaEngineTabVideoSource::Pull(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-    StreamTime aDesiredTime, const PrincipalHandle& aPrincipalHandle) {
+    StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
+    const PrincipalHandle& aPrincipalHandle) {
   TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i", aStream.get(),
                                aTrackID);
   VideoSegment segment;
   RefPtr<layers::Image> image;
   gfx::IntSize imageSize;
 
   {
     MutexAutoLock lock(mMutex);
@@ -248,20 +249,18 @@ void MediaEngineTabVideoSource::Pull(
       return;
     }
     if (mState == kStarted) {
       image = mImage;
       imageSize = mImageSize;
     }
   }
 
-  StreamTime delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
-  if (delta <= 0) {
-    return;
-  }
+  StreamTime delta = aDesiredTime - aEndOfAppendedData;
+  MOZ_ASSERT(delta > 0);
 
   // nullptr images are allowed
   segment.AppendFrame(image.forget(), delta, imageSize, aPrincipalHandle);
   // This can fail if either a) we haven't added the track yet, or b)
   // we've removed or ended the track.
   aStream->AppendToTrack(aTrackID, &(segment));
 }
 
--- a/dom/media/webrtc/MediaEngineTabVideoSource.h
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.h
@@ -40,17 +40,17 @@ class MediaEngineTabVideoSource : public
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
   nsresult FocusOnSelectedSource(
       const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
 
   void Pull(const RefPtr<const AllocationHandle>& aHandle,
             const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aDesiredTime,
+            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
             const PrincipalHandle& aPrincipalHandle) override;
 
   uint32_t GetBestFitnessDistance(
       const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
       const nsString& aDeviceId) const override {
     return 0;
   }
 
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -186,21 +186,23 @@ nsresult MediaEngineWebRTCMicrophoneSour
   mCurrentPrefs = outputPrefs;
 
   return NS_OK;
 }
 
 void MediaEngineWebRTCMicrophoneSource::Pull(
     const RefPtr<const AllocationHandle>&,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-    StreamTime aDesiredTime, const PrincipalHandle& aPrincipalHandle) {
+    StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
+    const PrincipalHandle& aPrincipalHandle) {
   // If pull is enabled, it means that the audio input is not open, and we
   // should fill it out with silence. This is the only method called on the
   // MSG thread.
-  mInputProcessing->Pull(aStream, aTrackID, aDesiredTime, aPrincipalHandle);
+  mInputProcessing->Pull(aStream, aTrackID, aEndOfAppendedData, aDesiredTime,
+                         aPrincipalHandle);
 }
 
 void MediaEngineWebRTCMicrophoneSource::UpdateAECSettings(
     bool aEnable, bool aUseAecMobile,
     EchoCancellation::SuppressionLevel aLevel) {
   AssertIsOnOwningThread();
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
@@ -789,33 +791,28 @@ void AudioInputProcessing::UpdateAPMExtr
   mAudioProcessing->SetExtraOptions(config);
 }
 
 void AudioInputProcessing::Start() { mEnabled = true; }
 
 void AudioInputProcessing::Stop() { mEnabled = false; }
 
 void AudioInputProcessing::Pull(const RefPtr<SourceMediaStream>& aStream,
-                                TrackID aTrackID, StreamTime aDesiredTime,
+                                TrackID aTrackID, StreamTime aEndOfAppendedData,
+                                StreamTime aDesiredTime,
                                 const PrincipalHandle& aPrincipalHandle) {
   TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i", aStream.get(),
                                aTrackID);
-  StreamTime delta;
 
   if (mEnded) {
     return;
   }
 
-  delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
-
-  if (delta < 0) {
-    LOG_FRAMES(
-        ("Not appending silence; %" PRId64 " frames already buffered", -delta));
-    return;
-  }
+  StreamTime delta = aDesiredTime - aEndOfAppendedData;
+  MOZ_ASSERT(delta > 0);
 
   if (!mLiveFramesAppended || !mLiveSilenceAppended) {
     // These are the iterations after starting or resuming audio capture.
     // Make sure there's at least one extra block buffered until audio
     // callbacks come in. We also allow appending silence one time after
     // audio callbacks have started, to cover the case where audio callbacks
     // start appending data immediately and there is no extra data buffered.
     delta += WEBAUDIO_BLOCK_SIZE;
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.h
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.h
@@ -51,17 +51,17 @@ class MediaEngineWebRTCMicrophoneSource 
   nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
 
   void Pull(const RefPtr<const AllocationHandle>& aHandle,
             const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aDesiredTime,
+            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
             const PrincipalHandle& aPrincipalHandle) override;
 
   /**
    * Assigns the current settings of the capture to aOutSettings.
    * Main thread only.
    */
   void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;
 
@@ -145,17 +145,18 @@ class MediaEngineWebRTCMicrophoneSource 
 // All communication is done via message passing using MSG ControlMessages
 class AudioInputProcessing : public AudioDataListener {
  public:
   AudioInputProcessing(uint32_t aMaxChannelCount,
                        RefPtr<SourceMediaStream> aStream, TrackID aTrackID,
                        const PrincipalHandle& aPrincipalHandle);
 
   void Pull(const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aDesiredTime, const PrincipalHandle& aPrincipalHandle);
+            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle);
 
   void NotifyOutputData(MediaStreamGraphImpl* aGraph, AudioDataValue* aBuffer,
                         size_t aFrames, TrackRate aRate,
                         uint32_t aChannels) override;
   void NotifyInputData(MediaStreamGraphImpl* aGraph,
                        const AudioDataValue* aBuffer, size_t aFrames,
                        TrackRate aRate, uint32_t aChannels) override;
 
@@ -276,18 +277,20 @@ class MediaEngineWebRTCAudioCaptureSourc
   nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
 
   void Pull(const RefPtr<const AllocationHandle>& aHandle,
             const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aDesiredTime,
-            const PrincipalHandle& aPrincipalHandle) override {}
+            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override {
+    MOZ_ASSERT_UNREACHABLE("Should never have to append silence");
+  }
 
   dom::MediaSourceEnum GetMediaSource() const override {
     return dom::MediaSourceEnum::AudioCapture;
   }
 
   nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override {
     return NS_ERROR_NOT_IMPLEMENTED;
   }