Backed out 5 changesets (bug 1513973) for browser-chrome failures in browser/base/content/test/webrtc/browser_devices_get_user_media_screen.js. CLOSED TREE
author: Dorel Luca <dluca@mozilla.com>
Fri, 21 Dec 2018 23:28:51 +0200
changeset 508869 c559079456f465e97f01fbb592e4f7d7abf1ced8
parent 508868 a3e192395bf8c9e202fa26dc10ba1abaaef0079c
child 508870 c41d930784a8821cf918fb971770a07b0997aa5c
push id: 10547
push user: ffxbld-merge
push date: Mon, 21 Jan 2019 13:03:58 +0000
treeherder: mozilla-beta@24ec1916bffe [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
bugs: 1513973
milestone: 66.0a1
backs out: 2ddf61b1db11219e203fe17fe5aaa84424fed362
6c839f548745965fa1cc8fc43b42f772d6435b75
5669b30f22651e2df4b4654fdba0c66b107a14a8
6781e633d62cc42d12bd9911c8cb22430d60f0e2
f1892f178011cd6c3d4d169f7bda3728bdb6e528
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Backed out 5 changesets (bug 1513973) for browser-chrome failures in browser/base/content/test/webrtc/browser_devices_get_user_media_screen.js. CLOSED TREE Backed out changeset 2ddf61b1db11 (bug 1513973) Backed out changeset 6c839f548745 (bug 1513973) Backed out changeset 5669b30f2265 (bug 1513973) Backed out changeset 6781e633d62c (bug 1513973) Backed out changeset f1892f178011 (bug 1513973)
dom/media/MediaManager.cpp
dom/media/MediaManager.h
dom/media/MediaStreamGraph.cpp
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineDefault.h
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.h
dom/media/webrtc/MediaEngineSource.h
dom/media/webrtc/MediaEngineTabVideoSource.cpp
dom/media/webrtc/MediaEngineTabVideoSource.h
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
dom/media/webrtc/MediaEngineWebRTCAudio.h
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -982,22 +982,23 @@ nsresult MediaDevice::Allocate(const dom
                                const char** aOutBadConstraint) {
   MOZ_ASSERT(MediaManager::IsInMediaThread());
   MOZ_ASSERT(mSource);
   return mSource->Allocate(aConstraints, aPrefs, mID, aPrincipalInfo,
                            getter_AddRefs(mAllocationHandle),
                            aOutBadConstraint);
 }
 
-void MediaDevice::SetTrack(const RefPtr<SourceMediaStream>& aStream,
-                           TrackID aTrackID,
-                           const PrincipalHandle& aPrincipalHandle) {
+nsresult MediaDevice::SetTrack(const RefPtr<SourceMediaStream>& aStream,
+                               TrackID aTrackID,
+                               const PrincipalHandle& aPrincipalHandle) {
   MOZ_ASSERT(MediaManager::IsInMediaThread());
   MOZ_ASSERT(mSource);
-  mSource->SetTrack(mAllocationHandle, aStream, aTrackID, aPrincipalHandle);
+  return mSource->SetTrack(mAllocationHandle, aStream, aTrackID,
+                           aPrincipalHandle);
 }
 
 nsresult MediaDevice::Start() {
   MOZ_ASSERT(MediaManager::IsInMediaThread());
   MOZ_ASSERT(mSource);
   return mSource->Start(mAllocationHandle);
 }
 
@@ -3979,29 +3980,21 @@ SourceListener::InitializeAsync() {
              __func__,
              [stream = mStream, principal = GetPrincipalHandle(),
               audioDevice =
                   mAudioDeviceState ? mAudioDeviceState->mDevice : nullptr,
               videoDevice =
                   mVideoDeviceState ? mVideoDeviceState->mDevice : nullptr](
                  MozPromiseHolder<SourceListenerPromise>& aHolder) {
                if (audioDevice) {
-                 audioDevice->SetTrack(stream, kAudioTrack, principal);
-               }
-
-               if (videoDevice) {
-                 videoDevice->SetTrack(stream, kVideoTrack, principal);
-               }
-
-               // SetTrack() queued the tracks. We add them synchronously here
-               // to avoid races.
-               stream->FinishAddTracks();
-
-               if (audioDevice) {
-                 nsresult rv = audioDevice->Start();
+                 nsresult rv =
+                     audioDevice->SetTrack(stream, kAudioTrack, principal);
+                 if (NS_SUCCEEDED(rv)) {
+                   rv = audioDevice->Start();
+                 }
                  if (NS_FAILED(rv)) {
                    nsString log;
                    if (rv == NS_ERROR_NOT_AVAILABLE) {
                      log.AssignLiteral("Concurrent mic process limit.");
                      aHolder.Reject(
                          MakeRefPtr<MediaMgrError>(
                              MediaMgrError::Name::NotReadableError, log),
                          __func__);
@@ -4011,32 +4004,39 @@ SourceListener::InitializeAsync() {
                    aHolder.Reject(MakeRefPtr<MediaMgrError>(
                                       MediaMgrError::Name::AbortError, log),
                                   __func__);
                    return;
                  }
                }
 
                if (videoDevice) {
-                 nsresult rv = videoDevice->Start();
+                 nsresult rv =
+                     videoDevice->SetTrack(stream, kVideoTrack, principal);
+                 if (NS_SUCCEEDED(rv)) {
+                   rv = videoDevice->Start();
+                 }
                  if (NS_FAILED(rv)) {
                    if (audioDevice) {
                      if (NS_WARN_IF(NS_FAILED(audioDevice->Stop()))) {
                        MOZ_ASSERT_UNREACHABLE("Stopping audio failed");
                      }
                    }
                    nsString log;
                    log.AssignLiteral("Starting video failed");
                    aHolder.Reject(MakeRefPtr<MediaMgrError>(
                                       MediaMgrError::Name::AbortError, log),
                                   __func__);
                    return;
                  }
                }
 
+               // Start() queued the tracks to be added synchronously to avoid
+               // races
+               stream->FinishAddTracks();
                LOG("started all sources");
                aHolder.Resolve(true, __func__);
              })
       ->Then(GetMainThreadSerialEventTarget(), __func__,
              [self = RefPtr<SourceListener>(this), this]() {
                if (mStopped) {
                  // We were shut down during the async init
                  return SourceListenerPromise::CreateAndResolve(true, __func__);
@@ -4050,18 +4050,25 @@ SourceListener::InitializeAsync() {
                  MOZ_DIAGNOSTIC_ASSERT(!state->mTrackEnabled);
                  MOZ_DIAGNOSTIC_ASSERT(!state->mDeviceEnabled);
                  MOZ_DIAGNOSTIC_ASSERT(!state->mStopped);
 
                  state->mDeviceEnabled = true;
                  state->mTrackEnabled = true;
                  state->mTrackEnabledTime = TimeStamp::Now();
 
-                 if (state == mVideoDeviceState.get()) {
-                   mStream->SetPullingEnabled(kVideoTrack, true);
+                 if (state->mDevice->GetMediaSource() !=
+                     MediaSourceEnum::AudioCapture) {
+                   // For AudioCapture mStream is a dummy stream, so we don't
+                   // try to enable pulling - there won't be a track to enable
+                   // it for.
+                   mStream->SetPullingEnabled(state == mAudioDeviceState.get()
+                                                  ? kAudioTrack
+                                                  : kVideoTrack,
+                                              true);
                  }
                }
                return SourceListenerPromise::CreateAndResolve(true, __func__);
              },
              [self = RefPtr<SourceListener>(this),
               this](RefPtr<MediaMgrError>&& aResult) {
                if (mStopped) {
                  return SourceListenerPromise::CreateAndReject(
@@ -4130,17 +4137,17 @@ void SourceListener::Remove() {
 
   // If it's destroyed, don't call - listener will be removed and we'll be
   // notified!
   if (!mStream->IsDestroyed()) {
     // We disable pulling before removing so we don't risk having live tracks
     // without a listener attached - that wouldn't produce data and would be
     // illegal to the graph.
     if (mAudioDeviceState) {
-      // Audio sources are responsible for disabling pulling themselves.
+      mStream->SetPullingEnabled(kAudioTrack, false);
       mStream->RemoveTrackListener(mAudioDeviceState->mListener, kAudioTrack);
     }
     if (mVideoDeviceState) {
       mStream->SetPullingEnabled(kVideoTrack, false);
       mStream->RemoveTrackListener(mVideoDeviceState->mListener, kVideoTrack);
     }
   }
 
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -81,18 +81,18 @@ class MediaDevice : public nsIMediaDevic
   uint32_t GetBestFitnessDistance(
       const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
       bool aIsChrome);
 
   nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                     const MediaEnginePrefs& aPrefs,
                     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                     const char** aOutBadConstraint);
-  void SetTrack(const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-                const PrincipalHandle& aPrincipal);
+  nsresult SetTrack(const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal);
   nsresult Start();
   nsresult Reconfigure(const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const char** aOutBadConstraint);
   nsresult FocusOnSelectedSource();
   nsresult Stop();
   nsresult Deallocate();
 
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -2538,31 +2538,31 @@ bool SourceMediaStream::PullNewData(Grap
   MutexAutoLock lock(mMutex);
   if (mFinished) {
     return false;
   }
   // Compute how much stream time we'll need assuming we don't block
   // the stream at all.
   StreamTime t = GraphTimeToStreamTime(aDesiredUpToTime);
   StreamTime current = mTracks.GetEarliestTrackEnd();
+  LOG(LogLevel::Verbose,
+      ("%p: Calling NotifyPull aStream=%p t=%f current end=%f", GraphImpl(),
+       this, GraphImpl()->MediaTimeToSeconds(t),
+       GraphImpl()->MediaTimeToSeconds(current)));
   for (const TrackData& track : mUpdateTracks) {
     if (!track.mPullingEnabled) {
       continue;
     }
     if (track.mCommands & TrackEventCommand::TRACK_EVENT_ENDED) {
       continue;
     }
     current = track.mEndOfFlushedData + track.mData->GetDuration();
     if (t <= current) {
       continue;
     }
-    LOG(LogLevel::Verbose,
-        ("%p: Calling NotifyPull stream=%p track=%d t=%f current end=%f",
-         GraphImpl(), this, track.mID, GraphImpl()->MediaTimeToSeconds(t),
-         GraphImpl()->MediaTimeToSeconds(current)));
     MutexAutoUnlock unlock(mMutex);
     for (TrackBound<MediaStreamTrackListener>& l : mTrackListeners) {
       if (l.mTrackID == track.mID) {
         l.mListener->NotifyPull(Graph(), current, t);
       }
     }
   }
   return true;
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -155,33 +155,34 @@ static void AllocateSolidColorFrame(laye
   aData.mPicSize = IntSize(aWidth, aHeight);
   aData.mStereoMode = StereoMode::MONO;
 }
 
 static void ReleaseFrame(layers::PlanarYCbCrData& aData) {
   free(aData.mYChannel);
 }
 
-void MediaEngineDefaultVideoSource::SetTrack(
+nsresult MediaEngineDefaultVideoSource::SetTrack(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
     const PrincipalHandle& aPrincipal) {
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated);
   MOZ_ASSERT(!mStream);
   MOZ_ASSERT(mTrackID == TRACK_NONE);
 
   {
     MutexAutoLock lock(mMutex);
     mStream = aStream;
     mTrackID = aTrackID;
   }
   aStream->AddTrack(aTrackID, new VideoSegment(),
                     SourceMediaStream::ADDTRACK_QUEUED);
+  return NS_OK;
 }
 
 nsresult MediaEngineDefaultVideoSource::Start(
     const RefPtr<const AllocationHandle>& aHandle) {
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated || mState == kStopped);
   MOZ_ASSERT(mStream, "SetTrack() must happen before Start()");
@@ -418,79 +419,65 @@ nsresult MediaEngineDefaultAudioSource::
     mStream->EndTrack(mTrackID);
     mStream = nullptr;
     mTrackID = TRACK_NONE;
   }
   mState = kReleased;
   return NS_OK;
 }
 
-void MediaEngineDefaultAudioSource::SetTrack(
+nsresult MediaEngineDefaultAudioSource::SetTrack(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
     const PrincipalHandle& aPrincipal) {
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated);
   MOZ_ASSERT(!mStream);
   MOZ_ASSERT(mTrackID == TRACK_NONE);
 
   // AddAudioTrack will take ownership of segment
   mStream = aStream;
   mTrackID = aTrackID;
   aStream->AddAudioTrack(aTrackID, aStream->GraphRate(), new AudioSegment(),
                          SourceMediaStream::ADDTRACK_QUEUED);
+  return NS_OK;
 }
 
 nsresult MediaEngineDefaultAudioSource::Start(
     const RefPtr<const AllocationHandle>& aHandle) {
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated || mState == kStopped);
   MOZ_ASSERT(mStream, "SetTrack() must happen before Start()");
   MOZ_ASSERT(IsTrackIDExplicit(mTrackID),
              "SetTrack() must happen before Start()");
 
   if (!mSineGenerator) {
     // generate sine wave (default 1KHz)
     mSineGenerator = new SineWaveGenerator(mStream->GraphRate(), mFreq);
   }
 
-  {
-    MutexAutoLock lock(mMutex);
-    mState = kStarted;
-  }
-
-  NS_DispatchToMainThread(
-      NS_NewRunnableFunction(__func__, [stream = mStream, track = mTrackID]() {
-        stream->SetPullingEnabled(track, true);
-      }));
-
+  MutexAutoLock lock(mMutex);
+  mState = kStarted;
   return NS_OK;
 }
 
 nsresult MediaEngineDefaultAudioSource::Stop(
     const RefPtr<const AllocationHandle>& aHandle) {
   AssertIsOnOwningThread();
 
   if (mState == kStopped || mState == kAllocated) {
     return NS_OK;
   }
 
   MOZ_ASSERT(mState == kStarted);
 
-  {
-    MutexAutoLock lock(mMutex);
-    mState = kStopped;
-  }
-
-  NS_DispatchToMainThread(
-      NS_NewRunnableFunction(__func__, [stream = mStream, track = mTrackID]() {
-        stream->SetPullingEnabled(track, false);
-      }));
+  MutexAutoLock lock(mMutex);
+  mState = kStopped;
   return NS_OK;
 }
 
 nsresult MediaEngineDefaultAudioSource::Reconfigure(
     const RefPtr<AllocationHandle>& aHandle,
     const dom::MediaTrackConstraints& aConstraints,
     const MediaEnginePrefs& aPrefs, const nsString& aDeviceId,
     const char** aOutBadConstraint) {
--- a/dom/media/webrtc/MediaEngineDefault.h
+++ b/dom/media/webrtc/MediaEngineDefault.h
@@ -42,19 +42,19 @@ class MediaEngineDefaultVideoSource : pu
   nsString GetName() const override;
   nsCString GetUUID() const override;
 
   nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                     const MediaEnginePrefs& aPrefs, const nsString& aDeviceId,
                     const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override;
-  void SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-                const PrincipalHandle& aPrincipal) override;
+  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal) override;
   nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
   nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
@@ -109,19 +109,19 @@ class MediaEngineDefaultAudioSource : pu
   nsString GetName() const override;
   nsCString GetUUID() const override;
 
   nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                     const MediaEnginePrefs& aPrefs, const nsString& aDeviceId,
                     const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override;
-  void SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-                const PrincipalHandle& aPrincipal) override;
+  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal) override;
   nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
   nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -235,17 +235,17 @@ nsresult MediaEngineRemoteVideoSource::D
 
   if (camera::GetChildAndCall(&camera::CamerasChild::ReleaseCaptureDevice,
                               mCapEngine, mCaptureIndex)) {
     MOZ_ASSERT_UNREACHABLE("Couldn't release allocated device");
   }
   return NS_OK;
 }
 
-void MediaEngineRemoteVideoSource::SetTrack(
+nsresult MediaEngineRemoteVideoSource::SetTrack(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
     const PrincipalHandle& aPrincipal) {
   LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated);
   MOZ_ASSERT(!mStream);
@@ -261,16 +261,17 @@ void MediaEngineRemoteVideoSource::SetTr
   {
     MutexAutoLock lock(mMutex);
     mStream = aStream;
     mTrackID = aTrackID;
     mPrincipal = aPrincipal;
   }
   aStream->AddTrack(aTrackID, new VideoSegment(),
                     SourceMediaStream::ADDTRACK_QUEUED);
+  return NS_OK;
 }
 
 nsresult MediaEngineRemoteVideoSource::Start(
     const RefPtr<const AllocationHandle>& aHandle) {
   LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated || mState == kStopped);
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.h
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.h
@@ -119,19 +119,19 @@ class MediaEngineRemoteVideoSource : pub
   // MediaEngineSource
   dom::MediaSourceEnum GetMediaSource() const override { return mMediaSource; }
   nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                     const MediaEnginePrefs& aPrefs, const nsString& aDeviceId,
                     const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override;
   nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
-  void SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-                const PrincipalHandle& aPrincipal) override;
+  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal) override;
   nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
   nsresult FocusOnSelectedSource(
       const RefPtr<const AllocationHandle>& aHandle) override;
--- a/dom/media/webrtc/MediaEngineSource.h
+++ b/dom/media/webrtc/MediaEngineSource.h
@@ -132,29 +132,27 @@ class MediaEngineSourceInterface {
                             const char** aOutBadConstraint) = 0;
 
   /**
    * Called by MediaEngine when a SourceMediaStream and TrackID have been
    * provided for the given AllocationHandle to feed data to.
    *
    * This must be called before Start for the given AllocationHandle.
    */
-  virtual void SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                        const RefPtr<SourceMediaStream>& aStream,
-                        TrackID aTrackID,
-                        const PrincipalHandle& aPrincipal) = 0;
+  virtual nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                            const RefPtr<SourceMediaStream>& aStream,
+                            TrackID aTrackID,
+                            const PrincipalHandle& aPrincipal) = 0;
 
   /**
    * Called by MediaEngine to start feeding data to the track associated with
    * the given AllocationHandle.
    *
    * If this is the first AllocationHandle to start, the underlying device
    * will be started.
-   *
-   * NB: Audio sources handle the enabling of pulling themselves.
    */
   virtual nsresult Start(const RefPtr<const AllocationHandle>& aHandle) = 0;
 
   /**
    * This brings focus to the selected source, e.g. to bring a captured window
    * to the front.
    *
    * We return one of the following:
@@ -193,18 +191,16 @@ class MediaEngineSourceInterface {
    * Called by MediaEngine to stop feeding data to the track associated with
    * the given AllocationHandle.
    *
    * If this was the last AllocationHandle that had been started,
    * the underlying device will be stopped.
    *
    * Double-stopping a given allocation handle is allowed and will return NS_OK.
    * This is necessary sometimes during shutdown.
-   *
-   * NB: Audio sources handle the disabling of pulling themselves.
    */
   virtual nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) = 0;
 
   /**
    * Called by MediaEngine to deallocate a handle to this source.
    *
    * If this was the last registered AllocationHandle, the underlying device
    * will be deallocated.
--- a/dom/media/webrtc/MediaEngineTabVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.cpp
@@ -188,30 +188,31 @@ nsresult MediaEngineTabVideoSource::Deal
   {
     MutexAutoLock lock(mMutex);
     mState = kReleased;
   }
 
   return NS_OK;
 }
 
-void MediaEngineTabVideoSource::SetTrack(
+nsresult MediaEngineTabVideoSource::SetTrack(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
     const mozilla::PrincipalHandle& aPrincipal) {
   AssertIsOnOwningThread();
   MOZ_ASSERT(mState == kAllocated);
 
   MOZ_ASSERT(!mStream);
   MOZ_ASSERT(mTrackID == TRACK_NONE);
   MOZ_ASSERT(aStream);
   MOZ_ASSERT(IsTrackIDExplicit(aTrackID));
   mStream = aStream;
   mTrackID = aTrackID;
   mStream->AddTrack(mTrackID, new VideoSegment());
+  return NS_OK;
 }
 
 nsresult MediaEngineTabVideoSource::Start(
     const RefPtr<const AllocationHandle>& aHandle) {
   AssertIsOnOwningThread();
   MOZ_ASSERT(mState == kAllocated);
 
   nsCOMPtr<nsIRunnable> runnable;
--- a/dom/media/webrtc/MediaEngineTabVideoSource.h
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.h
@@ -25,19 +25,19 @@ class MediaEngineTabVideoSource : public
   }
 
   nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                     const MediaEnginePrefs& aPrefs, const nsString& aDeviceId,
                     const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override;
   nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
-  void SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-                const PrincipalHandle& aPrincipal) override;
+  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal) override;
   nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
   nsresult FocusOnSelectedSource(
       const RefPtr<const AllocationHandle>& aHandle) override;
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -198,18 +198,19 @@ void MediaEngineWebRTCMicrophoneSource::
 
 void MediaEngineWebRTCMicrophoneSource::UpdateAECSettings(
     bool aEnable, bool aUseAecMobile,
     EchoCancellation::SuppressionLevel aLevel) {
   AssertIsOnOwningThread();
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
   RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
-  NS_DispatchToMainThread(media::NewRunnableFrom(
-      [that, graph = std::move(gripGraph), aEnable, aUseAecMobile, aLevel]() {
+  NS_DispatchToMainThread(
+      media::NewRunnableFrom([that, graph = std::move(gripGraph), aEnable,
+                              aUseAecMobile, aLevel]() mutable {
         class Message : public ControlMessage {
          public:
           Message(AudioInputProcessing* aInputProcessing, bool aEnable,
                   bool aUseAecMobile, EchoCancellation::SuppressionLevel aLevel)
               : ControlMessage(nullptr),
                 mInputProcessing(aInputProcessing),
                 mEnable(aEnable),
                 mUseAecMobile(aUseAecMobile),
@@ -237,17 +238,17 @@ void MediaEngineWebRTCMicrophoneSource::
 
 void MediaEngineWebRTCMicrophoneSource::UpdateAGCSettings(
     bool aEnable, GainControl::Mode aMode) {
   AssertIsOnOwningThread();
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
   RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
   NS_DispatchToMainThread(media::NewRunnableFrom(
-      [that, graph = std::move(gripGraph), aEnable, aMode]() {
+      [that, graph = std::move(gripGraph), aEnable, aMode]() mutable {
         class Message : public ControlMessage {
          public:
           Message(AudioInputProcessing* aInputProcessing, bool aEnable,
                   GainControl::Mode aMode)
               : ControlMessage(nullptr),
                 mInputProcessing(aInputProcessing),
                 mEnable(aEnable),
                 mMode(aMode) {}
@@ -273,17 +274,17 @@ void MediaEngineWebRTCMicrophoneSource::
 
 void MediaEngineWebRTCMicrophoneSource::UpdateNSSettings(
     bool aEnable, webrtc::NoiseSuppression::Level aLevel) {
   AssertIsOnOwningThread();
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
   RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
   NS_DispatchToMainThread(media::NewRunnableFrom(
-      [that, graph = std::move(gripGraph), aEnable, aLevel]() {
+      [that, graph = std::move(gripGraph), aEnable, aLevel]() mutable {
         class Message : public ControlMessage {
          public:
           Message(AudioInputProcessing* aInputProcessing, bool aEnable,
                   webrtc::NoiseSuppression::Level aLevel)
               : ControlMessage(nullptr),
                 mInputProcessing(aInputProcessing),
                 mEnable(aEnable),
                 mLevel(aLevel) {}
@@ -308,18 +309,19 @@ void MediaEngineWebRTCMicrophoneSource::
 }
 
 void MediaEngineWebRTCMicrophoneSource::UpdateAPMExtraOptions(
     bool aExtendedFilter, bool aDelayAgnostic) {
   AssertIsOnOwningThread();
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
   RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
-  NS_DispatchToMainThread(media::NewRunnableFrom(
-      [that, graph = std::move(gripGraph), aExtendedFilter, aDelayAgnostic]() {
+  NS_DispatchToMainThread(
+      media::NewRunnableFrom([that, graph = std::move(gripGraph),
+                              aExtendedFilter, aDelayAgnostic]() mutable {
         class Message : public ControlMessage {
          public:
           Message(AudioInputProcessing* aInputProcessing, bool aExtendedFilter,
                   bool aDelayAgnostic)
               : ControlMessage(nullptr),
                 mInputProcessing(aInputProcessing),
                 mExtendedFilter(aExtendedFilter),
                 mDelayAgnostic(aDelayAgnostic) {}
@@ -363,17 +365,17 @@ void MediaEngineWebRTCMicrophoneSource::
         static_cast<webrtc::EchoCancellation::SuppressionLevel>(aPrefs.mAec));
 
     UpdateAPMExtraOptions(mExtendedFilter, mDelayAgnostic);
   }
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
   RefPtr<MediaStreamGraphImpl> graphImpl = mStream->GraphImpl();
   NS_DispatchToMainThread(media::NewRunnableFrom(
-      [that, graph = std::move(graphImpl), prefs = aPrefs]() {
+      [that, graph = std::move(graphImpl), prefs = aPrefs]() mutable {
         that->mSettings->mEchoCancellation.Value() = prefs.mAecOn;
         that->mSettings->mAutoGainControl.Value() = prefs.mAgcOn;
         that->mSettings->mNoiseSuppression.Value() = prefs.mNoiseOn;
         that->mSettings->mChannelCount.Value() = prefs.mChannels;
 
         class Message : public ControlMessage {
          public:
           Message(AudioInputProcessing* aInputProcessing, bool aPassThrough,
@@ -421,23 +423,24 @@ nsresult MediaEngineWebRTCMicrophoneSour
   MediaEnginePrefs outputPrefs;
   nsresult rv =
       EvaluateSettings(normalized, aPrefs, &outputPrefs, aOutBadConstraint);
   if (NS_FAILED(rv)) {
     return rv;
   }
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
-  NS_DispatchToMainThread(media::NewRunnableFrom([that, prefs = outputPrefs]() {
-    that->mSettings->mEchoCancellation.Value() = prefs.mAecOn;
-    that->mSettings->mAutoGainControl.Value() = prefs.mAgcOn;
-    that->mSettings->mNoiseSuppression.Value() = prefs.mNoiseOn;
-    that->mSettings->mChannelCount.Value() = prefs.mChannels;
-    return NS_OK;
-  }));
+  NS_DispatchToMainThread(
+      media::NewRunnableFrom([that, prefs = outputPrefs]() mutable {
+        that->mSettings->mEchoCancellation.Value() = prefs.mAecOn;
+        that->mSettings->mAutoGainControl.Value() = prefs.mAgcOn;
+        that->mSettings->mNoiseSuppression.Value() = prefs.mNoiseOn;
+        that->mSettings->mChannelCount.Value() = prefs.mChannels;
+        return NS_OK;
+      }));
 
   mCurrentPrefs = outputPrefs;
 
   return rv;
 }
 
 nsresult MediaEngineWebRTCMicrophoneSource::Deallocate(
     const RefPtr<const AllocationHandle>&) {
@@ -465,17 +468,17 @@ nsresult MediaEngineWebRTCMicrophoneSour
   };
 
   if (mStream && IsTrackIDExplicit(mTrackID)) {
     RefPtr<MediaStream> sourceStream = mStream;
     RefPtr<AudioInputProcessing> inputProcessing = mInputProcessing;
     NS_DispatchToMainThread(media::NewRunnableFrom(
         [stream = std::move(sourceStream),
          audioInputProcessing = std::move(inputProcessing),
-         trackID = mTrackID]() {
+         trackID = mTrackID]() mutable {
           if (stream->IsDestroyed()) {
             // This stream has already been destroyed on main thread by its
             // DOMMediaStream. No cleanup left to do.
             return NS_OK;
           }
           MOZ_ASSERT(stream->GraphImpl());
           stream->GraphImpl()->AppendMessage(MakeUnique<EndTrackMessage>(
               stream, audioInputProcessing, trackID));
@@ -495,37 +498,42 @@ nsresult MediaEngineWebRTCMicrophoneSour
   MOZ_ASSERT(mState != kReleased, "Source not allocated");
   MOZ_ASSERT(mState != kStarted, "Source not stopped");
 
   mState = kReleased;
   LOG("Audio device %s deallocated", NS_ConvertUTF16toUTF8(mDeviceName).get());
   return NS_OK;
 }
 
-void MediaEngineWebRTCMicrophoneSource::SetTrack(
+nsresult MediaEngineWebRTCMicrophoneSource::SetTrack(
     const RefPtr<const AllocationHandle>&,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
     const PrincipalHandle& aPrincipal) {
   AssertIsOnOwningThread();
   MOZ_ASSERT(aStream);
   MOZ_ASSERT(IsTrackIDExplicit(aTrackID));
 
+  if (mStream && mStream->Graph() != aStream->Graph()) {
+    return NS_ERROR_NOT_AVAILABLE;
+  }
+
   MOZ_ASSERT(!mStream);
   MOZ_ASSERT(mTrackID == TRACK_NONE);
   MOZ_ASSERT(mPrincipal == PRINCIPAL_HANDLE_NONE);
   mStream = aStream;
   mTrackID = aTrackID;
   mPrincipal = aPrincipal;
 
   AudioSegment* segment = new AudioSegment();
 
   aStream->AddAudioTrack(aTrackID, aStream->GraphRate(), segment,
                          SourceMediaStream::ADDTRACK_QUEUED);
 
   LOG("Stream %p registered for microphone capture", aStream.get());
+  return NS_OK;
 }
 
 class StartStopMessage : public ControlMessage {
  public:
   enum StartStop { Start, Stop };
 
   StartStopMessage(AudioInputProcessing* aInputProcessing, StartStop aAction)
       : ControlMessage(nullptr),
@@ -566,26 +574,24 @@ nsresult MediaEngineWebRTCMicrophoneSour
     return NS_ERROR_FAILURE;
   }
 
   mInputProcessing = new AudioInputProcessing(mDeviceMaxChannelCount, mStream,
                                               mTrackID, mPrincipal);
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
   RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
-  NS_DispatchToMainThread(
-      media::NewRunnableFrom([that, graph = std::move(gripGraph), deviceID,
-                              stream = mStream, track = mTrackID]() {
+  NS_DispatchToMainThread(media::NewRunnableFrom(
+      [that, graph = std::move(gripGraph), deviceID]() mutable {
         if (graph) {
           graph->AppendMessage(MakeUnique<StartStopMessage>(
               that->mInputProcessing, StartStopMessage::Start));
         }
 
-        stream->OpenAudioInput(deviceID, that->mInputProcessing);
-        stream->SetPullingEnabled(track, true);
+        that->mStream->OpenAudioInput(deviceID, that->mInputProcessing);
 
         return NS_OK;
       }));
 
   ApplySettings(mCurrentPrefs);
 
   MOZ_ASSERT(mState != kReleased);
   mState = kStarted;
@@ -602,28 +608,26 @@ nsresult MediaEngineWebRTCMicrophoneSour
 
   if (mState == kStopped) {
     // Already stopped - this is allowed
     return NS_OK;
   }
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
   RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
-  NS_DispatchToMainThread(
-      media::NewRunnableFrom([that, graph = std::move(gripGraph),
-                              stream = mStream, track = mTrackID]() {
+  NS_DispatchToMainThread(media::NewRunnableFrom(
+      [that, graph = std::move(gripGraph), stream = mStream]() mutable {
         if (graph) {
           graph->AppendMessage(MakeUnique<StartStopMessage>(
               that->mInputProcessing, StartStopMessage::Stop));
         }
 
         CubebUtils::AudioDeviceID deviceID = that->mDeviceInfo->DeviceID();
         Maybe<CubebUtils::AudioDeviceID> id = Some(deviceID);
         stream->CloseAudioInput(id, that->mInputProcessing);
-        stream->SetPullingEnabled(track, false);
 
         return NS_OK;
       }));
 
   MOZ_ASSERT(mState == kStarted, "Should be started when stopping");
   mState = kStopped;
 
   return NS_OK;
@@ -1155,22 +1159,23 @@ nsCString MediaEngineWebRTCAudioCaptureS
 
   uuid.ToProvidedString(uuidBuffer);
   asciiString.AssignASCII(uuidBuffer);
 
   // Remove {} and the null terminator
   return nsCString(Substring(asciiString, 1, NSID_LENGTH - 3));
 }
 
-void MediaEngineWebRTCAudioCaptureSource::SetTrack(
+nsresult MediaEngineWebRTCAudioCaptureSource::SetTrack(
     const RefPtr<const AllocationHandle>&,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
     const PrincipalHandle& aPrincipalHandle) {
   AssertIsOnOwningThread();
   // Nothing to do here. aStream is a placeholder dummy and not exposed.
+  return NS_OK;
 }
 
 nsresult MediaEngineWebRTCAudioCaptureSource::Start(
     const RefPtr<const AllocationHandle>&) {
   AssertIsOnOwningThread();
   return NS_OK;
 }
 
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.h
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.h
@@ -38,19 +38,19 @@ class MediaEngineWebRTCMicrophoneSource 
   nsCString GetUUID() const override;
 
   nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                     const MediaEnginePrefs& aPrefs, const nsString& aDeviceId,
                     const ipc::PrincipalInfo& aPrincipalInfo,
                     AllocationHandle** aOutHandle,
                     const char** aOutBadConstraint) override;
   nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
-  void SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-                const PrincipalHandle& aPrincipal) override;
+  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal) override;
   nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;
 
@@ -264,19 +264,19 @@ class MediaEngineWebRTCAudioCaptureSourc
     *aOutHandle = nullptr;
     return NS_OK;
   }
   nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override {
     // Nothing to do here, everything is managed in MediaManager.cpp
     MOZ_ASSERT(!aHandle);
     return NS_OK;
   }
-  void SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-                const PrincipalHandle& aPrincipal) override;
+  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal) override;
   nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
   nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                        const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const nsString& aDeviceId,
                        const char** aOutBadConstraint) override;