Bug 1341555 - Consolidate use of the MSG's AbstractMainThread, and stop having AbstractMainThread on MediaStreams. r=pehrsons
author: Paul Adenot <paul@paul.cx>
Thu, 29 Jun 2017 11:30:57 -0700
changeset 366746 5c2fea87df168bdcbfe807ffedfb6d7c1ea4cb4e
parent 366745 e0c4f2ca44b4292881a56fab6bf584bf7d966299
child 366747 29ac12c81bcda10a981dfe3371f70a9b17a19edd
push id: 92039
push user: paul@paul.cx
push date: Thu, 29 Jun 2017 20:39:37 +0000
treeherder: mozilla-inbound@29ac12c81bcd [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: pehrsons
bugs: 1341555
milestone: 56.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1341555 - Consolidate use of the MSG's AbstractMainThread, and stop having AbstractMainThread on MediaStreams. r=pehrsons MozReview-Commit-ID: 5hGDQcfpH6a
dom/html/HTMLMediaElement.cpp
dom/media/AudioCaptureStream.cpp
dom/media/AudioCaptureStream.h
dom/media/DOMMediaStream.cpp
dom/media/DOMMediaStream.h
dom/media/MediaManager.cpp
dom/media/MediaRecorder.cpp
dom/media/MediaRecorder.h
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraph.h
dom/media/MediaStreamGraphImpl.h
dom/media/MediaStreamTrack.cpp
dom/media/TrackUnionStream.cpp
dom/media/TrackUnionStream.h
dom/media/mediasink/DecodedStream.cpp
dom/media/webaudio/AudioDestinationNode.cpp
dom/media/webaudio/AudioNodeExternalInputStream.cpp
dom/media/webaudio/AudioNodeExternalInputStream.h
dom/media/webaudio/AudioNodeStream.cpp
dom/media/webaudio/AudioNodeStream.h
dom/media/webaudio/BiquadFilterNode.cpp
dom/media/webaudio/ConvolverNode.cpp
dom/media/webaudio/DelayNode.cpp
dom/media/webaudio/IIRFilterNode.cpp
dom/media/webaudio/MediaStreamAudioSourceNode.cpp
dom/media/webaudio/PannerNode.cpp
dom/media/webspeech/synth/nsSpeechTask.cpp
media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -4864,18 +4864,17 @@ class HTMLMediaElement::StreamListener :
 public:
   StreamListener(HTMLMediaElement* aElement, const char* aName) :
     WatchTarget(aName),
     mElement(aElement),
     mHaveCurrentData(false),
     mBlocked(false),
     mFinished(false),
     mMutex(aName),
-    mPendingNotifyOutput(false),
-    mAbstractMainThread(aElement->AbstractMainThread())
+    mPendingNotifyOutput(false)
   {}
   void Forget()
   {
     mElement = nullptr;
     NotifyWatchers();
   }
 
   // Main thread
@@ -4933,54 +4932,50 @@ public:
         this,
         &StreamListener::DoNotifyBlocked);
     } else {
       event = NewRunnableMethod(
         "dom::HTMLMediaElement::StreamListener::DoNotifyUnblocked",
         this,
         &StreamListener::DoNotifyUnblocked);
     }
-    aGraph->DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                       event.forget());
+    aGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());
   }
   virtual void NotifyHasCurrentData(MediaStreamGraph* aGraph) override
   {
     MutexAutoLock lock(mMutex);
     aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-      mAbstractMainThread,
       NewRunnableMethod(
         "dom::HTMLMediaElement::StreamListener::DoNotifyHaveCurrentData",
         this,
         &StreamListener::DoNotifyHaveCurrentData));
   }
   virtual void NotifyOutput(MediaStreamGraph* aGraph,
                             GraphTime aCurrentTime) override
   {
     MutexAutoLock lock(mMutex);
     if (mPendingNotifyOutput)
       return;
     mPendingNotifyOutput = true;
     aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-      mAbstractMainThread,
       NewRunnableMethod("dom::HTMLMediaElement::StreamListener::DoNotifyOutput",
                         this,
                         &StreamListener::DoNotifyOutput));
   }
 
 private:
   // These fields may only be accessed on the main thread
   HTMLMediaElement* mElement;
   bool mHaveCurrentData;
   bool mBlocked;
   bool mFinished;
 
   // mMutex protects the fields below; they can be accessed on any thread
   Mutex mMutex;
   bool mPendingNotifyOutput;
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 class HTMLMediaElement::MediaStreamTracksAvailableCallback:
   public OnTracksAvailableCallback
 {
 public:
   explicit MediaStreamTracksAvailableCallback(HTMLMediaElement* aElement):
       OnTracksAvailableCallback(), mElement(aElement)
--- a/dom/media/AudioCaptureStream.cpp
+++ b/dom/media/AudioCaptureStream.cpp
@@ -25,18 +25,21 @@ using namespace mozilla::dom;
 using namespace mozilla::gfx;
 
 namespace mozilla
 {
 
 // We are mixing to mono until PeerConnection can accept stereo
 static const uint32_t MONO = 1;
 
-AudioCaptureStream::AudioCaptureStream(TrackID aTrackId, AbstractThread* aMainThread)
-  : ProcessedMediaStream(aMainThread), mTrackId(aTrackId), mStarted(false), mTrackCreated(false)
+AudioCaptureStream::AudioCaptureStream(TrackID aTrackId)
+  : ProcessedMediaStream()
+  , mTrackId(aTrackId)
+  , mStarted(false)
+  , mTrackCreated(false)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_COUNT_CTOR(AudioCaptureStream);
   mMixer.AddCallback(this);
 }
 
 AudioCaptureStream::~AudioCaptureStream()
 {
--- a/dom/media/AudioCaptureStream.h
+++ b/dom/media/AudioCaptureStream.h
@@ -19,17 +19,17 @@ class DOMMediaStream;
 
 /**
  * See MediaStreamGraph::CreateAudioCaptureStream.
  */
 class AudioCaptureStream : public ProcessedMediaStream,
                            public MixerCallbackReceiver
 {
 public:
-  AudioCaptureStream(TrackID aTrackId, AbstractThread* aMainThread);
+  AudioCaptureStream(TrackID aTrackId);
   virtual ~AudioCaptureStream();
 
   void Start();
 
   void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
 
 protected:
   void MixerCallback(AudioDataValue* aMixedBuffer, AudioSampleFormat aFormat,
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -138,17 +138,16 @@ NS_IMPL_CYCLE_COLLECTION_0(MediaStreamTr
  * Listener registered on the Owned stream to detect added and ended owned
  * tracks for keeping the list of MediaStreamTracks in sync with the tracks
  * added and ended directly at the source.
  */
 class DOMMediaStream::OwnedStreamListener : public MediaStreamListener {
 public:
   explicit OwnedStreamListener(DOMMediaStream* aStream)
     : mStream(aStream)
-    , mAbstractMainThread(aStream->mAbstractMainThread)
   {}
 
   void Forget() { mStream = nullptr; }
 
   void DoNotifyTrackCreated(TrackID aTrackID, MediaSegment::Type aType,
                             MediaStream* aInputStream, TrackID aInputTrackID)
   {
     MOZ_ASSERT(NS_IsMainThread());
@@ -219,58 +218,53 @@ public:
   void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                 StreamTime aTrackOffset, TrackEventCommand aTrackEvents,
                                 const MediaSegment& aQueuedMedia,
                                 MediaStream* aInputStream,
                                 TrackID aInputTrackID) override
   {
     if (aTrackEvents & TrackEventCommand::TRACK_EVENT_CREATED) {
       aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-        mAbstractMainThread,
         NewRunnableMethod<TrackID,
                           MediaSegment::Type,
                           RefPtr<MediaStream>,
                           TrackID>(
           "DOMMediaStream::OwnedStreamListener::DoNotifyTrackCreated",
           this,
           &OwnedStreamListener::DoNotifyTrackCreated,
           aID,
           aQueuedMedia.GetType(),
           aInputStream,
           aInputTrackID));
     } else if (aTrackEvents & TrackEventCommand::TRACK_EVENT_ENDED) {
       aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-        mAbstractMainThread,
         NewRunnableMethod<RefPtr<MediaStream>, TrackID, TrackID>(
           "DOMMediaStream::OwnedStreamListener::DoNotifyTrackEnded",
           this,
           &OwnedStreamListener::DoNotifyTrackEnded,
           aInputStream,
           aInputTrackID,
           aID));
     }
   }
 
 private:
   // These fields may only be accessed on the main thread
   DOMMediaStream* mStream;
-
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 /**
  * Listener registered on the Playback stream to detect when tracks end and when
  * all new tracks this iteration have been created - for when several tracks are
  * queued by the source and committed all at once.
  */
 class DOMMediaStream::PlaybackStreamListener : public MediaStreamListener {
 public:
   explicit PlaybackStreamListener(DOMMediaStream* aStream)
     : mStream(aStream)
-    , mAbstractMainThread(aStream->mAbstractMainThread)
   {}
 
   void Forget()
   {
     MOZ_ASSERT(NS_IsMainThread());
     mStream = nullptr;
   }
 
@@ -304,42 +298,38 @@ public:
                                               &DOMMediaStream::NotifyFinished));
   }
 
   // The methods below are called on the MediaStreamGraph thread.
 
   void NotifyFinishedTrackCreation(MediaStreamGraph* aGraph) override
   {
     aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-      mAbstractMainThread,
       NewRunnableMethod(
         "DOMMediaStream::PlaybackStreamListener::DoNotifyFinishedTrackCreation",
         this,
         &PlaybackStreamListener::DoNotifyFinishedTrackCreation));
   }
 
 
   void NotifyEvent(MediaStreamGraph* aGraph,
                    MediaStreamGraphEvent event) override
   {
     if (event == MediaStreamGraphEvent::EVENT_FINISHED) {
       aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-        mAbstractMainThread,
         NewRunnableMethod(
           "DOMMediaStream::PlaybackStreamListener::DoNotifyFinished",
           this,
           &PlaybackStreamListener::DoNotifyFinished));
     }
   }
 
 private:
   // These fields may only be accessed on the main thread
   DOMMediaStream* mStream;
-
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 class DOMMediaStream::PlaybackTrackListener : public MediaStreamTrackConsumer
 {
 public:
   explicit PlaybackTrackListener(DOMMediaStream* aStream) :
     mStream(aStream) {}
 
@@ -431,18 +421,17 @@ NS_INTERFACE_MAP_END_INHERITING(DOMMedia
 
 DOMMediaStream::DOMMediaStream(nsPIDOMWindowInner* aWindow,
                                MediaStreamTrackSourceGetter* aTrackSourceGetter)
   : mLogicalStreamStartTime(0), mWindow(aWindow),
     mInputStream(nullptr), mOwnedStream(nullptr), mPlaybackStream(nullptr),
     mTracksPendingRemoval(0), mTrackSourceGetter(aTrackSourceGetter),
     mPlaybackTrackListener(MakeAndAddRef<PlaybackTrackListener>(this)),
     mTracksCreated(false), mNotifiedOfMediaStreamGraphShutdown(false),
-    mActive(false), mSetInactiveOnFinish(false),
-    mAbstractMainThread(aWindow ? aWindow->GetDocGroup()->AbstractMainThreadFor(TaskCategory::Other) : nullptr)
+    mActive(false), mSetInactiveOnFinish(false)
 {
   nsresult rv;
   nsCOMPtr<nsIUUIDGenerator> uuidgen =
     do_GetService("@mozilla.org/uuid-generator;1", &rv);
 
   if (NS_SUCCEEDED(rv) && uuidgen) {
     nsID uuid;
     memset(&uuid, 0, sizeof(uuid));
@@ -876,42 +865,39 @@ void
 DOMMediaStream::SetInactiveOnFinish()
 {
   mSetInactiveOnFinish = true;
 }
 
 void
 DOMMediaStream::InitSourceStream(MediaStreamGraph* aGraph)
 {
-  MOZ_ASSERT(mAbstractMainThread);
-  InitInputStreamCommon(aGraph->CreateSourceStream(mAbstractMainThread), aGraph);
+  InitInputStreamCommon(aGraph->CreateSourceStream(), aGraph);
   InitOwnedStreamCommon(aGraph);
   InitPlaybackStreamCommon(aGraph);
 }
 
 void
 DOMMediaStream::InitTrackUnionStream(MediaStreamGraph* aGraph)
 {
-  MOZ_ASSERT(mAbstractMainThread);
-  InitInputStreamCommon(aGraph->CreateTrackUnionStream(mAbstractMainThread), aGraph);
+  InitInputStreamCommon(aGraph->CreateTrackUnionStream(), aGraph);
   InitOwnedStreamCommon(aGraph);
   InitPlaybackStreamCommon(aGraph);
 }
 
 void
 DOMMediaStream::InitAudioCaptureStream(nsIPrincipal* aPrincipal, MediaStreamGraph* aGraph)
 {
-  MOZ_ASSERT(mAbstractMainThread);
   const TrackID AUDIO_TRACK = 1;
 
   RefPtr<BasicTrackSource> audioCaptureSource =
     new BasicTrackSource(aPrincipal, MediaSourceEnum::AudioCapture);
 
   AudioCaptureStream* audioCaptureStream =
-    static_cast<AudioCaptureStream*>(aGraph->CreateAudioCaptureStream(AUDIO_TRACK, mAbstractMainThread));
+    static_cast<AudioCaptureStream*>(aGraph->CreateAudioCaptureStream(AUDIO_TRACK));
   InitInputStreamCommon(audioCaptureStream, aGraph);
   InitOwnedStreamCommon(aGraph);
   InitPlaybackStreamCommon(aGraph);
   RefPtr<MediaStreamTrack> track =
     CreateDOMTrack(AUDIO_TRACK, MediaSegment::AUDIO, audioCaptureSource);
   AddTrackInternal(track);
 
   audioCaptureStream->Start();
@@ -925,36 +911,34 @@ DOMMediaStream::InitInputStreamCommon(Me
 
   mInputStream = aStream;
   mInputStream->RegisterUser();
 }
 
 void
 DOMMediaStream::InitOwnedStreamCommon(MediaStreamGraph* aGraph)
 {
-  MOZ_ASSERT(mAbstractMainThread);
   MOZ_ASSERT(!mPlaybackStream, "Owned stream must be initialized before playback stream");
 
-  mOwnedStream = aGraph->CreateTrackUnionStream(mAbstractMainThread);
+  mOwnedStream = aGraph->CreateTrackUnionStream();
   mOwnedStream->SetAutofinish(true);
   mOwnedStream->RegisterUser();
   if (mInputStream) {
     mOwnedPort = mOwnedStream->AllocateInputPort(mInputStream);
   }
 
   // Setup track listeners
   mOwnedListener = new OwnedStreamListener(this);
   mOwnedStream->AddListener(mOwnedListener);
 }
 
 void
 DOMMediaStream::InitPlaybackStreamCommon(MediaStreamGraph* aGraph)
 {
-  MOZ_ASSERT(mAbstractMainThread);
-  mPlaybackStream = aGraph->CreateTrackUnionStream(mAbstractMainThread);
+  mPlaybackStream = aGraph->CreateTrackUnionStream();
   mPlaybackStream->SetAutofinish(true);
   mPlaybackStream->RegisterUser();
   if (mOwnedStream) {
     mPlaybackPort = mPlaybackStream->AllocateInputPort(mOwnedStream);
   }
 
   mPlaybackListener = new PlaybackStreamListener(this);
   mPlaybackStream->AddListener(mPlaybackListener);
--- a/dom/media/DOMMediaStream.h
+++ b/dom/media/DOMMediaStream.h
@@ -585,18 +585,16 @@ public:
   // being destroyed, so we don't hold on to a dead pointer. Main thread only.
   void RegisterTrackListener(TrackListener* aListener);
 
   // Unregisters a track listener from this MediaStream. The caller must call
   // UnregisterTrackListener before being destroyed, so we don't hold on to
   // a dead pointer. Main thread only.
   void UnregisterTrackListener(TrackListener* aListener);
 
-  AbstractThread* AbstractMainThread() const { return mAbstractMainThread; }
-
 protected:
   virtual ~DOMMediaStream();
 
   void Destroy();
   void InitSourceStream(MediaStreamGraph* aGraph);
   void InitTrackUnionStream(MediaStreamGraph* aGraph);
   void InitAudioCaptureStream(nsIPrincipal* aPrincipal, MediaStreamGraph* aGraph);
 
@@ -751,17 +749,16 @@ private:
   // Principal identifying who may access the collected contents of this stream.
   // If null, this stream can be used by anyone because it has no content yet.
   nsCOMPtr<nsIPrincipal> mPrincipal;
   // Video principal is used by video element as access is requested to its
   // image data.
   nsCOMPtr<nsIPrincipal> mVideoPrincipal;
   nsTArray<dom::PrincipalChangeObserver<DOMMediaStream>*> mPrincipalChangeObservers;
   CORSMode mCORSMode;
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 NS_DEFINE_STATIC_IID_ACCESSOR(DOMMediaStream,
                               NS_DOMMEDIASTREAM_IID)
 
 #define NS_DOMLOCALMEDIASTREAM_IID \
 { 0xb1437260, 0xec61, 0x4dfa, \
   { 0x92, 0x54, 0x04, 0x44, 0xe2, 0xb5, 0x94, 0x9c } }
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -1079,18 +1079,17 @@ public:
     if (mAudioDevice &&
         mAudioDevice->GetMediaSource() == MediaSourceEnum::AudioCapture) {
       // It should be possible to pipe the capture stream to anything. CORS is
       // not a problem here, we got explicit user content.
       nsCOMPtr<nsIPrincipal> principal = window->GetExtantDoc()->NodePrincipal();
       domStream =
         DOMMediaStream::CreateAudioCaptureStreamAsInput(window, principal, msg);
 
-      stream = msg->CreateSourceStream(
-        globalWindow->AbstractMainThreadFor(TaskCategory::Other)); // Placeholder
+      stream = msg->CreateSourceStream(); // Placeholder
       msg->RegisterCaptureStreamForWindow(
             mWindowID, domStream->GetInputStream()->AsProcessedStream());
       window->SetAudioCapture(true);
     } else {
       class LocalTrackSource : public MediaStreamTrackSource
       {
       public:
         LocalTrackSource(nsIPrincipal* aPrincipal,
--- a/dom/media/MediaRecorder.cpp
+++ b/dom/media/MediaRecorder.cpp
@@ -428,17 +428,16 @@ class MediaRecorder::Session: public nsI
 public:
   Session(MediaRecorder* aRecorder, int32_t aTimeSlice)
     : mRecorder(aRecorder)
     , mTimeSlice(aTimeSlice)
     , mStopIssued(false)
     , mIsStartEventFired(false)
     , mNeedSessionEndTask(true)
     , mSelectedVideoTrackID(TRACK_NONE)
-    , mAbstractMainThread(aRecorder->mAbstractMainThread)
   {
     MOZ_ASSERT(NS_IsMainThread());
 
     uint32_t maxMem = Preferences::GetUint("media.recorder.max_memory",
                                            MAX_ALLOW_MEMORY_BUFFER);
     mEncodedBufferCache = new EncodedBufferCache(maxMem);
     mLastBlobTimeStamp = TimeStamp::Now();
   }
@@ -484,17 +483,17 @@ public:
   void Start()
   {
     LOG(LogLevel::Debug, ("Session.Start %p", this));
     MOZ_ASSERT(NS_IsMainThread());
 
     // Create a Track Union Stream
     MediaStreamGraph* gm = mRecorder->GetSourceMediaStream()->Graph();
     TrackRate trackRate = gm->GraphRate();
-    mTrackUnionStream = gm->CreateTrackUnionStream(mAbstractMainThread);
+    mTrackUnionStream = gm->CreateTrackUnionStream();
     MOZ_ASSERT(mTrackUnionStream, "CreateTrackUnionStream failed");
 
     mTrackUnionStream->SetAutofinish(true);
 
     DOMMediaStream* domStream = mRecorder->Stream();
     if (domStream) {
       // Get the available tracks from the DOMMediaStream.
       // The callback will report back tracks that we have to connect to
@@ -947,17 +946,16 @@ private:
   bool mStopIssued;
   // Indicate the session had fire start event. Encoding thread only.
   bool mIsStartEventFired;
   // False if the InitEncoder called successfully, ensure the
   // ExtractRunnable/DestroyRunnable will end the session.
   // Main thread only.
   bool mNeedSessionEndTask;
   TrackID mSelectedVideoTrackID;
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 NS_IMPL_ISUPPORTS(MediaRecorder::Session, nsIObserver)
 
 MediaRecorder::~MediaRecorder()
 {
   if (mPipeStream != nullptr) {
     mInputPort->Destroy();
@@ -966,31 +964,29 @@ MediaRecorder::~MediaRecorder()
   LOG(LogLevel::Debug, ("~MediaRecorder (%p)", this));
   UnRegisterActivityObserver();
 }
 
 MediaRecorder::MediaRecorder(DOMMediaStream& aSourceMediaStream,
                              nsPIDOMWindowInner* aOwnerWindow)
   : DOMEventTargetHelper(aOwnerWindow)
   , mState(RecordingState::Inactive)
-  , mAbstractMainThread(aSourceMediaStream.AbstractMainThread())
 {
   MOZ_ASSERT(aOwnerWindow);
   MOZ_ASSERT(aOwnerWindow->IsInnerWindow());
   mDOMStream = &aSourceMediaStream;
 
   RegisterActivityObserver();
 }
 
 MediaRecorder::MediaRecorder(AudioNode& aSrcAudioNode,
                              uint32_t aSrcOutput,
                              nsPIDOMWindowInner* aOwnerWindow)
   : DOMEventTargetHelper(aOwnerWindow)
   , mState(RecordingState::Inactive)
-  , mAbstractMainThread(aSrcAudioNode.AbstractMainThread())
 {
   MOZ_ASSERT(aOwnerWindow);
   MOZ_ASSERT(aOwnerWindow->IsInnerWindow());
 
   // Only AudioNodeStream of kind EXTERNAL_STREAM stores output audio data in
   // the track (see AudioNodeStream::AdvanceOutputSegment()). That means track
   // union stream in recorder session won't be able to copy data from the
   // stream of non-destination node. Create a pipe stream in this case.
--- a/dom/media/MediaRecorder.h
+++ b/dom/media/MediaRecorder.h
@@ -154,17 +154,16 @@ protected:
 
   // It specifies the container format as well as the audio and video capture formats.
   nsString mMimeType;
 
   uint32_t mAudioBitsPerSecond;
   uint32_t mVideoBitsPerSecond;
   uint32_t mBitsPerSecond;
 
-  const RefPtr<AbstractThread> mAbstractMainThread;
 private:
   // Register MediaRecorder into Document to listen the activity changes.
   void RegisterActivityObserver();
   void UnRegisterActivityObserver();
 
   bool CheckPermission(const nsString &aType);
 };
 
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -1007,19 +1007,21 @@ MediaStreamGraphImpl::OpenAudioInputImpl
 }
 
 nsresult
 MediaStreamGraphImpl::OpenAudioInput(int aID,
                                      AudioDataListener *aListener)
 {
   // So, so, so annoying.  Can't AppendMessage except on Mainthread
   if (!NS_IsMainThread()) {
-    RefPtr<nsIRunnable> runnable = WrapRunnable(this,
-                                  &MediaStreamGraphImpl::OpenAudioInput,
-                                  aID, RefPtr<AudioDataListener>(aListener));
+    RefPtr<nsIRunnable> runnable =
+      WrapRunnable(this,
+                   &MediaStreamGraphImpl::OpenAudioInput,
+                   aID,
+                   RefPtr<AudioDataListener>(aListener));
     mAbstractMainThread->Dispatch(runnable.forget());
     return NS_OK;
   }
   class Message : public ControlMessage {
   public:
     Message(MediaStreamGraphImpl *aGraph, int aID,
             AudioDataListener *aListener) :
       ControlMessage(nullptr), mGraph(aGraph), mID(aID), mListener(aListener) {}
@@ -1078,19 +1080,20 @@ MediaStreamGraphImpl::CloseAudioInputImp
   }
 }
 
 void
 MediaStreamGraphImpl::CloseAudioInput(AudioDataListener *aListener)
 {
   // So, so, so annoying.  Can't AppendMessage except on Mainthread
   if (!NS_IsMainThread()) {
-    RefPtr<nsIRunnable> runnable = WrapRunnable(this,
-                                                &MediaStreamGraphImpl::CloseAudioInput,
-                                                RefPtr<AudioDataListener>(aListener));
+    RefPtr<nsIRunnable> runnable =
+      WrapRunnable(this,
+                   &MediaStreamGraphImpl::CloseAudioInput,
+                   RefPtr<AudioDataListener>(aListener));
     mAbstractMainThread->Dispatch(runnable.forget());
     return;
   }
   class Message : public ControlMessage {
   public:
     Message(MediaStreamGraphImpl *aGraph, AudioDataListener *aListener) :
       ControlMessage(nullptr), mGraph(aGraph), mListener(aListener) {}
     virtual void Run()
@@ -1884,33 +1887,32 @@ MediaStreamGraphImpl::AppendMessage(Uniq
 }
 
 void
 MediaStreamGraphImpl::Dispatch(already_AddRefed<nsIRunnable>&& aRunnable)
 {
   mAbstractMainThread->Dispatch(Move(aRunnable));
 }
 
-MediaStream::MediaStream(AbstractThread* aMainThread)
+MediaStream::MediaStream()
   : mTracksStartTime(0)
   , mStartBlocking(GRAPH_TIME_MAX)
   , mSuspendedCount(0)
   , mFinished(false)
   , mNotifiedFinished(false)
   , mNotifiedBlocked(false)
   , mHasCurrentData(false)
   , mNotifiedHasCurrentData(false)
   , mMainThreadCurrentTime(0)
   , mMainThreadFinished(false)
   , mFinishedNotificationSent(false)
   , mMainThreadDestroyed(false)
   , mNrOfMainThreadUsers(0)
   , mGraph(nullptr)
   , mAudioChannelType(dom::AudioChannel::Normal)
-  , mAbstractMainThread(aMainThread)
 {
   MOZ_COUNT_CTOR(MediaStream);
 }
 
 MediaStream::~MediaStream()
 {
   MOZ_COUNT_DTOR(MediaStream);
   NS_ASSERTION(mMainThreadDestroyed, "Should have been destroyed already");
@@ -2548,43 +2550,37 @@ MediaStream::RunAfterPendingUpdates(alre
   // runnable will run in finite time.
   if (!(graph->mRealtime || graph->mNonRealtimeProcessing)) {
     runnable->Run();
     return;
   }
 
   class Message : public ControlMessage {
   public:
-    Message(MediaStream* aStream,
-            already_AddRefed<nsIRunnable> aRunnable,
-            AbstractThread* aMainThread)
+    Message(MediaStream* aStream, already_AddRefed<nsIRunnable> aRunnable)
       : ControlMessage(aStream)
       , mRunnable(aRunnable)
-      , mAbstractMainThread(aMainThread)
-      {}
+    {}
     void Run() override
     {
-      mStream->Graph()->
-        DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                   mRunnable.forget());
+      mStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+        mRunnable.forget());
     }
     void RunDuringShutdown() override
     {
       // Don't run mRunnable now as it may call AppendMessage() which would
       // assume that there are no remaining controlMessagesToRunDuringShutdown.
       MOZ_ASSERT(NS_IsMainThread());
       mStream->GraphImpl()->Dispatch(mRunnable.forget());
     }
   private:
     nsCOMPtr<nsIRunnable> mRunnable;
-    const RefPtr<AbstractThread> mAbstractMainThread;
   };
 
-  graph->AppendMessage(
-    MakeUnique<Message>(this, runnable.forget(), mAbstractMainThread));
+  graph->AppendMessage(MakeUnique<Message>(this, runnable.forget()));
 }
 
 void
 MediaStream::SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode)
 {
   if (aMode == DisabledTrackMode::ENABLED) {
     for (int32_t i = mDisabledTracks.Length() - 1; i >= 0; --i) {
       if (aTrackID == mDisabledTracks[i].mTrackID) {
@@ -2686,26 +2682,26 @@ MediaStream::AddMainThreadListener(MainT
 
   private:
     ~NotifyRunnable() {}
 
     RefPtr<MediaStream> mStream;
   };
 
   nsCOMPtr<nsIRunnable> runnable = new NotifyRunnable(this);
-  mAbstractMainThread->Dispatch(runnable.forget());
+  GraphImpl()->Dispatch(runnable.forget());
 }
 
-SourceMediaStream::SourceMediaStream(AbstractThread* aMainThread) :
-  MediaStream(aMainThread),
-  mMutex("mozilla::media::SourceMediaStream"),
-  mUpdateKnownTracksTime(0),
-  mPullEnabled(false),
-  mUpdateFinished(false),
-  mNeedsMixing(false)
+SourceMediaStream::SourceMediaStream()
+  : MediaStream()
+  , mMutex("mozilla::media::SourceMediaStream")
+  , mUpdateKnownTracksTime(0)
+  , mPullEnabled(false)
+  , mUpdateFinished(false)
+  , mNeedsMixing(false)
 {
 }
 
 nsresult
 SourceMediaStream::OpenAudioInput(int aID,
                                   AudioDataListener *aListener)
 {
   if (GraphImpl()) {
@@ -3285,53 +3281,53 @@ MediaInputPort::BlockSourceTrackIdImpl(T
 already_AddRefed<Pledge<bool>>
 MediaInputPort::BlockSourceTrackId(TrackID aTrackId, BlockingMode aBlockingMode)
 {
   class Message : public ControlMessage {
   public:
     Message(MediaInputPort* aPort,
             TrackID aTrackId,
             BlockingMode aBlockingMode,
-            already_AddRefed<nsIRunnable> aRunnable,
-            AbstractThread* aMainThread)
-      : ControlMessage(aPort->GetDestination()),
-        mPort(aPort), mTrackId(aTrackId), mBlockingMode(aBlockingMode),
-        mRunnable(aRunnable), mAbstractMainThread(aMainThread) {}
+            already_AddRefed<nsIRunnable> aRunnable)
+      : ControlMessage(aPort->GetDestination())
+      , mPort(aPort)
+      , mTrackId(aTrackId)
+      , mBlockingMode(aBlockingMode)
+      , mRunnable(aRunnable)
+    {
+    }
     void Run() override
     {
       mPort->BlockSourceTrackIdImpl(mTrackId, mBlockingMode);
       if (mRunnable) {
-        mStream->Graph()->
-          DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                     mRunnable.forget());
+        mStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+          mRunnable.forget());
       }
     }
     void RunDuringShutdown() override
     {
       Run();
     }
     RefPtr<MediaInputPort> mPort;
     TrackID mTrackId;
     BlockingMode mBlockingMode;
     nsCOMPtr<nsIRunnable> mRunnable;
-    const RefPtr<AbstractThread> mAbstractMainThread;
   };
 
   MOZ_ASSERT(IsTrackIDExplicit(aTrackId),
              "Only explicit TrackID is allowed");
 
   auto pledge = MakeRefPtr<Pledge<bool>>();
   nsCOMPtr<nsIRunnable> runnable = NewRunnableFrom([pledge]() {
     MOZ_ASSERT(NS_IsMainThread());
     pledge->Resolve(true);
     return NS_OK;
   });
-  GraphImpl()->AppendMessage(MakeUnique<Message>(this, aTrackId, aBlockingMode,
-                                                 runnable.forget(),
-                                                 mAbstractMainThread));
+  GraphImpl()->AppendMessage(
+    MakeUnique<Message>(this, aTrackId, aBlockingMode, runnable.forget()));
   return pledge.forget();
 }
 
 already_AddRefed<MediaInputPort>
 ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, TrackID aTrackID,
                                         TrackID aDestTrackID,
                                         uint16_t aInputNumber, uint16_t aOutputNumber,
                                         nsTArray<TrackID>* aBlockedTracks)
@@ -3359,19 +3355,18 @@ ProcessedMediaStream::AllocateInputPort(
 
   MOZ_ASSERT(aStream->GraphImpl() == GraphImpl());
   MOZ_ASSERT(aTrackID == TRACK_ANY || IsTrackIDExplicit(aTrackID),
              "Only TRACK_ANY and explicit ID are allowed for source track");
   MOZ_ASSERT(aDestTrackID == TRACK_ANY || IsTrackIDExplicit(aDestTrackID),
              "Only TRACK_ANY and explicit ID are allowed for destination track");
   MOZ_ASSERT(aTrackID != TRACK_ANY || aDestTrackID == TRACK_ANY,
              "Generic MediaInputPort cannot produce a single destination track");
-  RefPtr<MediaInputPort> port =
-    new MediaInputPort(aStream, aTrackID, this, aDestTrackID,
-                       aInputNumber, aOutputNumber, mAbstractMainThread);
+  RefPtr<MediaInputPort> port = new MediaInputPort(
+    aStream, aTrackID, this, aDestTrackID, aInputNumber, aOutputNumber);
   if (aBlockedTracks) {
     for (TrackID trackID : *aBlockedTracks) {
       port->BlockSourceTrackIdImpl(trackID, BlockingMode::CREATION);
     }
   }
   port->SetGraphImpl(GraphImpl());
   GraphImpl()->AppendMessage(MakeUnique<Message>(port));
   return port.forget();
@@ -3444,25 +3439,25 @@ MediaStreamGraphImpl::MediaStreamGraphIm
   , mForceShutDown(false)
   , mPostedRunInStableStateEvent(false)
   , mDetectedNotRunning(false)
   , mPostedRunInStableState(false)
   , mRealtime(aDriverRequested != OFFLINE_THREAD_DRIVER)
   , mNonRealtimeProcessing(false)
   , mStreamOrderDirty(false)
   , mLatencyLog(AsyncLatencyLogger::Get())
+  , mAbstractMainThread(aMainThread)
 #ifdef MOZ_WEBRTC
   , mFarendObserverRef(nullptr)
 #endif
   , mSelfRef(this)
 #ifdef DEBUG
   , mCanRunMessagesSynchronously(false)
 #endif
   , mAudioChannel(aChannel)
-  , mAbstractMainThread(aMainThread)
 {
   if (mRealtime) {
     if (aDriverRequested == AUDIO_THREAD_DRIVER) {
       AudioCallbackDriver* driver = new AudioCallbackDriver(this);
       mDriver = driver;
     } else {
       mDriver = new SystemClockDriver(this);
     }
@@ -3470,16 +3465,23 @@ MediaStreamGraphImpl::MediaStreamGraphIm
     mDriver = new OfflineClockDriver(this, MEDIA_GRAPH_TARGET_PERIOD_MS);
   }
 
   mLastMainThreadUpdate = TimeStamp::Now();
 
   RegisterWeakAsyncMemoryReporter(this);
 }
 
+AbstractThread*
+MediaStreamGraph::AbstractMainThread()
+{
+  MOZ_ASSERT(static_cast<MediaStreamGraphImpl*>(this)->mAbstractMainThread);
+  return static_cast<MediaStreamGraphImpl*>(this)->mAbstractMainThread;
+}
+
 void
 MediaStreamGraphImpl::Destroy()
 {
   // First unregister from memory reporting.
   UnregisterWeakMemoryReporter(this);
 
   // Clear the self reference which will destroy this instance.
   mSelfRef = nullptr;
@@ -3575,21 +3577,21 @@ MediaStreamGraph::GetInstance(MediaStrea
 
 MediaStreamGraph*
 MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate,
                                             nsPIDOMWindowInner* aWindow)
 {
   NS_ASSERTION(NS_IsMainThread(), "Main thread only");
 
   nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(aWindow);
-  MediaStreamGraphImpl* graph =
-    new MediaStreamGraphImpl(OFFLINE_THREAD_DRIVER,
-                             aSampleRate,
-                             AudioChannel::Normal,
-                             parentObject->AbstractMainThreadFor(TaskCategory::Other));
+  MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(
+    OFFLINE_THREAD_DRIVER,
+    aSampleRate,
+    AudioChannel::Normal,
+    parentObject->AbstractMainThreadFor(TaskCategory::Other));
 
   LOG(LogLevel::Debug, ("Starting up Offline MediaStreamGraph %p", graph));
 
   return graph;
 }
 
 void
 MediaStreamGraph::DestroyNonRealtimeInstance(MediaStreamGraph* aGraph)
@@ -3750,35 +3752,35 @@ FinishCollectReports(nsIHandleReportCall
   }
 
 #undef REPORT
 
   manager->EndReport();
 }
 
 SourceMediaStream*
-MediaStreamGraph::CreateSourceStream(AbstractThread* aMainThread)
+MediaStreamGraph::CreateSourceStream()
 {
-  SourceMediaStream* stream = new SourceMediaStream(aMainThread);
+  SourceMediaStream* stream = new SourceMediaStream();
   AddStream(stream);
   return stream;
 }
 
 ProcessedMediaStream*
-MediaStreamGraph::CreateTrackUnionStream(AbstractThread* aMainThread)
+MediaStreamGraph::CreateTrackUnionStream()
 {
-  TrackUnionStream* stream = new TrackUnionStream(aMainThread);
+  TrackUnionStream* stream = new TrackUnionStream();
   AddStream(stream);
   return stream;
 }
 
 ProcessedMediaStream*
-MediaStreamGraph::CreateAudioCaptureStream(TrackID aTrackId, AbstractThread* aMainThread)
+MediaStreamGraph::CreateAudioCaptureStream(TrackID aTrackId)
 {
-  AudioCaptureStream* stream = new AudioCaptureStream(aTrackId, aMainThread);
+  AudioCaptureStream* stream = new AudioCaptureStream(aTrackId);
   AddStream(stream);
   return stream;
 }
 
 void
 MediaStreamGraph::AddStream(MediaStream* aStream)
 {
   NS_ADDREF(aStream);
@@ -4171,19 +4173,17 @@ MediaStreamGraphImpl::ConnectToCaptureSt
       ProcessedMediaStream* sink = mWindowCaptureStreams[i].mCaptureStreamSink;
       return sink->AllocateInputPort(aMediaStream);
     }
   }
   return nullptr;
 }
 
 void
-MediaStreamGraph::
-DispatchToMainThreadAfterStreamStateUpdate(AbstractThread* aMainThread,
-                                           already_AddRefed<nsIRunnable> aRunnable)
+MediaStreamGraph::DispatchToMainThreadAfterStreamStateUpdate(
+  already_AddRefed<nsIRunnable> aRunnable)
 {
-  MOZ_ASSERT(aMainThread);
   AssertOnGraphThreadOrNotRunning();
   *mPendingUpdateRunnables.AppendElement() =
-    aMainThread->CreateDirectTaskDrainer(Move(aRunnable));
+    AbstractMainThread()->CreateDirectTaskDrainer(Move(aRunnable));
 }
 
 } // namespace mozilla
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -72,17 +72,16 @@ namespace media {
  *
  * Media decoding, audio processing and media playback use thread-safe APIs to
  * the media graph to ensure they can continue while the main thread is blocked.
  *
  * When the graph is changed, we may need to throw out buffered data and
  * reprocess it. This is triggered automatically by the MediaStreamGraph.
  */
 
-class AbstractThread;
 class AudioNodeEngine;
 class AudioNodeExternalInputStream;
 class AudioNodeStream;
 class MediaInputPort;
 class MediaStream;
 class MediaStreamGraph;
 class MediaStreamGraphImpl;
 class ProcessedMediaStream;
@@ -250,17 +249,17 @@ struct TrackBound
 #undef GetCurrentTime
 #endif
 
 class MediaStream : public mozilla::LinkedListElement<MediaStream>
 {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStream)
 
-  explicit MediaStream(AbstractThread* aMainThread);
+  explicit MediaStream();
 
 protected:
   // Protected destructor, to discourage deletion outside of Release():
   virtual ~MediaStream();
 
 public:
   /**
    * Returns the graph that owns this stream.
@@ -660,30 +659,28 @@ protected:
   bool mFinishedNotificationSent;
   bool mMainThreadDestroyed;
   int mNrOfMainThreadUsers;
 
   // Our media stream graph.  null if destroyed on the graph thread.
   MediaStreamGraphImpl* mGraph;
 
   dom::AudioChannel mAudioChannelType;
-
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 /**
  * This is a stream into which a decoder can write audio and video.
  *
  * Audio and video can be written on any thread, but you probably want to
  * always write from the same thread to avoid unexpected interleavings.
  */
 class SourceMediaStream : public MediaStream
 {
 public:
-  explicit SourceMediaStream(AbstractThread* aMainThread);
+  explicit SourceMediaStream();
 
   SourceMediaStream* AsSourceStream() override { return this; }
 
   // Media graph thread only
 
   // Users of audio inputs go through the stream so it can track when the
   // last stream referencing an input goes away, so it can close the cubeb
   // input.  Also note: callable on any thread (though it bounces through
@@ -952,28 +949,29 @@ enum class BlockingMode
  * clears its reference (the last main-thread reference to the object). When
  * the Destroy message is processed on the graph manager thread we disconnect
  * the port and drop the graph's reference, destroying the object.
  */
 class MediaInputPort final
 {
 private:
   // Do not call this constructor directly. Instead call aDest->AllocateInputPort.
-  MediaInputPort(MediaStream* aSource, TrackID& aSourceTrack,
-                 ProcessedMediaStream* aDest, TrackID& aDestTrack,
-                 uint16_t aInputNumber, uint16_t aOutputNumber,
-                 AbstractThread* aMainThread)
+  MediaInputPort(MediaStream* aSource,
+                 TrackID& aSourceTrack,
+                 ProcessedMediaStream* aDest,
+                 TrackID& aDestTrack,
+                 uint16_t aInputNumber,
+                 uint16_t aOutputNumber)
     : mSource(aSource)
     , mSourceTrack(aSourceTrack)
     , mDest(aDest)
     , mDestTrack(aDestTrack)
     , mInputNumber(aInputNumber)
     , mOutputNumber(aOutputNumber)
     , mGraph(nullptr)
-    , mAbstractMainThread(aMainThread)
   {
     MOZ_COUNT_CTOR(MediaInputPort);
   }
 
   // Private destructor, to discourage deletion outside of Release():
   ~MediaInputPort()
   {
     MOZ_COUNT_DTOR(MediaInputPort);
@@ -1106,30 +1104,30 @@ private:
   const uint16_t mInputNumber;
   const uint16_t mOutputNumber;
 
   typedef Pair<TrackID, BlockingMode> BlockedTrack;
   nsTArray<BlockedTrack> mBlockedTracks;
 
   // Our media stream graph
   MediaStreamGraphImpl* mGraph;
-
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 /**
  * This stream processes zero or more input streams in parallel to produce
  * its output. The details of how the output is produced are handled by
  * subclasses overriding the ProcessInput method.
  */
 class ProcessedMediaStream : public MediaStream
 {
 public:
-  explicit ProcessedMediaStream(AbstractThread* aMainThread)
-    : MediaStream(aMainThread), mAutofinish(false), mCycleMarker(0)
+  explicit ProcessedMediaStream()
+    : MediaStream()
+    , mAutofinish(false)
+    , mCycleMarker(0)
   {}
 
   // Control API.
   /**
    * Allocates a new input port attached to source aStream.
    * This stream can be removed by calling MediaInputPort::Remove().
    *
    * The input port is tied to aTrackID in the source stream.
@@ -1243,17 +1241,17 @@ protected:
   bool mAutofinish;
   // After UpdateStreamOrder(), mCycleMarker is either 0 or 1 to indicate
   // whether this stream is in a muted cycle.  During ordering it can contain
   // other marker values - see MediaStreamGraphImpl::UpdateStreamOrder().
   uint32_t mCycleMarker;
 };
 
 /**
- * There can be multiple MediaStreamGraph per process: one per AudioChannel.
+ * There is a single MediaStreamGraph per window.
  * Additionally, each OfflineAudioContext object creates its own MediaStreamGraph
  * object too.
  */
 class MediaStreamGraph
 {
 public:
 
   // We ensure that the graph current time advances in multiples of
@@ -1272,53 +1270,58 @@ public:
     OFFLINE_THREAD_DRIVER
   };
   static const uint32_t AUDIO_CALLBACK_DRIVER_SHUTDOWN_TIMEOUT = 20*1000;
 
   // Main thread only
   static MediaStreamGraph* GetInstance(GraphDriverType aGraphDriverRequested,
                                        dom::AudioChannel aChannel,
                                        nsPIDOMWindowInner* aWindow);
-  static MediaStreamGraph* CreateNonRealtimeInstance(TrackRate aSampleRate,
-                                                     nsPIDOMWindowInner* aWindowId);
+  static MediaStreamGraph* CreateNonRealtimeInstance(
+    TrackRate aSampleRate,
+    nsPIDOMWindowInner* aWindowId);
+
+  // Return the correct main thread for this graph. This always returns
+  // something that is valid. Thread safe.
+  AbstractThread* AbstractMainThread();
+
   // Idempotent
   static void DestroyNonRealtimeInstance(MediaStreamGraph* aGraph);
 
   virtual nsresult OpenAudioInput(int aID,
                                   AudioDataListener *aListener) {
     return NS_ERROR_FAILURE;
   }
   virtual void CloseAudioInput(AudioDataListener *aListener) {}
 
   // Control API.
   /**
    * Create a stream that a media decoder (or some other source of
    * media data, such as a camera) can write to.
    */
-  SourceMediaStream* CreateSourceStream(AbstractThread* aMainThread);
+  SourceMediaStream* CreateSourceStream();
   /**
    * Create a stream that will form the union of the tracks of its input
    * streams.
    * A TrackUnionStream contains all the tracks of all its input streams.
    * Adding a new input stream makes that stream's tracks immediately appear as new
    * tracks starting at the time the input stream was added.
    * Removing an input stream makes the output tracks corresponding to the
    * removed tracks immediately end.
    * For each added track, the track ID of the output track is the track ID
    * of the input track or one plus the maximum ID of all previously added
    * tracks, whichever is greater.
    * TODO at some point we will probably need to add API to select
    * particular tracks of each input stream.
    */
-  ProcessedMediaStream* CreateTrackUnionStream(AbstractThread* aMainThread);
+  ProcessedMediaStream* CreateTrackUnionStream();
   /**
    * Create a stream that will mix all its audio input.
    */
-  ProcessedMediaStream* CreateAudioCaptureStream(TrackID aTrackId,
-                                                 AbstractThread* aMainThread);
+  ProcessedMediaStream* CreateAudioCaptureStream(TrackID aTrackId);
 
   /**
    * Add a new stream to the graph.  Main thread.
    */
   void AddStream(MediaStream* aStream);
 
   /* From the main thread, ask the MSG to send back an event when the graph
    * thread is running, and audio is being processed. */
@@ -1346,29 +1349,19 @@ public:
 
   /**
    * Media graph thread only.
    * Dispatches a runnable that will run on the main thread after all
    * main-thread stream state has been next updated.
    *
    * Should only be called during MediaStreamListener callbacks or during
    * ProcessedMediaStream::ProcessInput().
-   *
-   * |aMainThread| is the corresponding AbstractThread on the main thread to
-   * drain the direct tasks generated by |aRunnable|.
-   * Note: The reasons for assigning proper |aMainThread| are
-   * - MSG serves media elements in multiple windows run on main thread.
-   * - DocGroup-specific AbstractMainThread is introduced to cluster the tasks
-   *   of the same window for prioritizing tasks among different windows.
-   * - Proper |aMainThread| ensures that tasks dispatched to the main thread are
-   *   clustered to the right queue and are executed in right order.
    */
-  virtual void
-  DispatchToMainThreadAfterStreamStateUpdate(AbstractThread* aMainThread,
-                                             already_AddRefed<nsIRunnable> aRunnable);
+  virtual void DispatchToMainThreadAfterStreamStateUpdate(
+    already_AddRefed<nsIRunnable> aRunnable);
 
   /**
    * Returns graph sample rate in Hz.
    */
   TrackRate GraphRate() const { return mSampleRate; }
 
   void RegisterCaptureStreamForWindow(uint64_t aWindowId,
                                       ProcessedMediaStream* aCaptureStream);
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -807,16 +807,17 @@ public:
    * blocking order.
    */
   bool mStreamOrderDirty;
   /**
    * Hold a ref to the Latency logger
    */
   RefPtr<AsyncLatencyLogger> mLatencyLog;
   AudioMixer mMixer;
+  const RefPtr<AbstractThread> mAbstractMainThread;
 #ifdef MOZ_WEBRTC
   RefPtr<AudioOutputObserver> mFarendObserverRef;
 #endif
 
   dom::AudioChannel AudioChannel() const { return mAudioChannel; }
 
   // used to limit graph shutdown time
   nsCOMPtr<nsITimer> mShutdownTimer;
@@ -848,14 +849,13 @@ private:
 #ifdef DEBUG
   /**
    * Used to assert when AppendMessage() runs ControlMessages synchronously.
    */
   bool mCanRunMessagesSynchronously;
 #endif
 
   dom::AudioChannel mAudioChannel;
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 } // namespace mozilla
 
 #endif /* MEDIASTREAMGRAPHIMPL_H_ */
--- a/dom/media/MediaStreamTrack.cpp
+++ b/dom/media/MediaStreamTrack.cpp
@@ -83,17 +83,16 @@ NS_IMPL_CYCLE_COLLECTION_0(MediaStreamTr
  * will be a combination of its old principal and all the new ones until the
  * latest main thread principal matches the PrincipalHandle on the MSG thread.
  */
 class MediaStreamTrack::PrincipalHandleListener : public MediaStreamTrackListener
 {
 public:
   explicit PrincipalHandleListener(MediaStreamTrack* aTrack)
     : mTrack(aTrack)
-    , mAbstractMainThread(aTrack->mOwningStream->AbstractMainThread())
     {}
 
   void Forget()
   {
     MOZ_ASSERT(NS_IsMainThread());
     mTrack = nullptr;
   }
 
@@ -107,29 +106,27 @@ public:
 
     mTrack->NotifyPrincipalHandleChanged(aNewPrincipalHandle);
   }
 
   void NotifyPrincipalHandleChanged(MediaStreamGraph* aGraph,
                                     const PrincipalHandle& aNewPrincipalHandle) override
   {
     aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-      mAbstractMainThread,
       NewRunnableMethod<StoreCopyPassByConstLRef<PrincipalHandle>>(
         "dom::MediaStreamTrack::PrincipalHandleListener::"
         "DoNotifyPrincipalHandleChanged",
         this,
         &PrincipalHandleListener::DoNotifyPrincipalHandleChanged,
         aNewPrincipalHandle));
   }
 
 protected:
   // These fields may only be accessed on the main thread
   MediaStreamTrack* mTrack;
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,
                                    TrackID aInputTrackID,
                                    MediaStreamTrackSource* aSource,
                                    const MediaTrackConstraints& aConstraints)
   : mOwningStream(aStream), mTrackID(aTrackID),
     mInputTrackID(aInputTrackID), mSource(aSource),
--- a/dom/media/TrackUnionStream.cpp
+++ b/dom/media/TrackUnionStream.cpp
@@ -41,18 +41,19 @@ namespace mozilla {
 
 #ifdef STREAM_LOG
 #undef STREAM_LOG
 #endif
 
 LazyLogModule gTrackUnionStreamLog("TrackUnionStream");
 #define STREAM_LOG(type, msg) MOZ_LOG(gTrackUnionStreamLog, type, msg)
 
-TrackUnionStream::TrackUnionStream(AbstractThread* aMainThread) :
-  ProcessedMediaStream(aMainThread), mNextAvailableTrackID(1)
+TrackUnionStream::TrackUnionStream()
+  : ProcessedMediaStream()
+  , mNextAvailableTrackID(1)
 {
 }
 
   void TrackUnionStream::RemoveInput(MediaInputPort* aPort)
   {
     STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p removing input %p", this, aPort));
     for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) {
       if (mTrackMap[i].mInputPort == aPort) {
--- a/dom/media/TrackUnionStream.h
+++ b/dom/media/TrackUnionStream.h
@@ -12,17 +12,17 @@
 
 namespace mozilla {
 
 /**
  * See MediaStreamGraph::CreateTrackUnionStream.
  */
 class TrackUnionStream : public ProcessedMediaStream {
 public:
-  explicit TrackUnionStream(AbstractThread* aMainThread);
+  explicit TrackUnionStream();
 
   virtual TrackUnionStream* AsTrackUnionStream() override { return this; }
   friend class DOMMediaStream;
 
   void RemoveInput(MediaInputPort* aPort) override;
   void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
 
   void SetTrackEnabledImpl(TrackID aTrackID, DisabledTrackMode aMode) override;
--- a/dom/media/mediasink/DecodedStream.cpp
+++ b/dom/media/mediasink/DecodedStream.cpp
@@ -54,17 +54,16 @@ public:
       mOnOutput.Notify(t);
     }
   }
 
   void NotifyEvent(MediaStreamGraph* aGraph, MediaStreamGraphEvent event) override
   {
     if (event == MediaStreamGraphEvent::EVENT_FINISHED) {
       aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-        mAbstractMainThread,
         NewRunnableMethod("DecodedStreamGraphListener::DoNotifyFinished",
                           this,
                           &DecodedStreamGraphListener::DoNotifyFinished));
     }
   }
 
   void DoNotifyFinished()
   {
@@ -178,17 +177,17 @@ DecodedStreamData::DecodedStreamData(Out
                                      MozPromiseHolder<GenericPromise>&& aPromise,
                                      AbstractThread* aMainThread)
   : mAudioFramesWritten(0)
   , mNextVideoTime(aInit.mStartTime)
   , mNextAudioTime(aInit.mStartTime)
   , mHaveSentFinish(false)
   , mHaveSentFinishAudio(false)
   , mHaveSentFinishVideo(false)
-  , mStream(aOutputStreamManager->Graph()->CreateSourceStream(aMainThread))
+  , mStream(aOutputStreamManager->Graph()->CreateSourceStream())
   // DecodedStreamGraphListener will resolve this promise.
   , mListener(new DecodedStreamGraphListener(mStream, Move(aPromise), aMainThread))
   // mPlaying is initially true because MDSM won't start playback until playing
   // becomes true. This is consistent with the settings of AudioSink.
   , mPlaying(true)
   , mEOSVideoCompensation(false)
   , mOutputStreamManager(aOutputStreamManager)
   , mAbstractMainThread(aMainThread)
--- a/dom/media/webaudio/AudioDestinationNode.cpp
+++ b/dom/media/webaudio/AudioDestinationNode.cpp
@@ -163,18 +163,17 @@ public:
                           mSampleRate, mBuffer.forget(), rv);
     if (rv.Failed()) {
       rv.SuppressException();
       return;
     }
 
     aNode->ResolvePromise(renderedBuffer);
 
-    mAbstractMainThread->Dispatch(do_AddRef(new OnCompleteTask(context,
-                                                               renderedBuffer)));
+    context->Dispatch(do_AddRef(new OnCompleteTask(context, renderedBuffer)));
 
     context->OnStateChanged(nullptr, AudioContextState::Closed);
   }
 
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
   {
     size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
     if (mBuffer) {
@@ -256,19 +255,18 @@ public:
     }
 
     bool newInputMuted = aInput.IsNull() || aInput.IsMuted();
     if (newInputMuted != mLastInputMuted) {
       mLastInputMuted = newInputMuted;
 
       RefPtr<InputMutedRunnable> runnable =
         new InputMutedRunnable(aStream, newInputMuted);
-      aStream->Graph()->
-        DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                   runnable.forget());
+      aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+        runnable.forget());
     }
   }
 
   bool IsActive() const override
   {
     // Keep processing to track stream time, which is used for all timelines
     // associated with the same AudioContext.  If there are no other engines
     // for the AudioContext, then this could return false to suspend the
@@ -331,19 +329,21 @@ AudioDestinationNode::AudioDestinationNo
   , mFramesToProduce(aLength)
   , mAudioChannel(AudioChannel::Normal)
   , mIsOffline(aIsOffline)
   , mAudioChannelSuspended(false)
   , mCaptured(false)
   , mAudible(AudioChannelService::AudibleState::eAudible)
 {
   nsPIDOMWindowInner* window = aContext->GetParentObject();
-  MediaStreamGraph* graph = aIsOffline ?
-                            MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate, window) :
-                            MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER, aChannel, window);
+  MediaStreamGraph* graph =
+    aIsOffline
+      ? MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate, window)
+      : MediaStreamGraph::GetInstance(
+          MediaStreamGraph::AUDIO_THREAD_DRIVER, aChannel, window);
   AudioNodeEngine* engine = aIsOffline ?
                             new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                              aLength, aSampleRate) :
                             static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));
 
   AudioNodeStream::Flags flags =
     AudioNodeStream::NEED_MAIN_THREAD_CURRENT_TIME |
     AudioNodeStream::NEED_MAIN_THREAD_FINISHED |
--- a/dom/media/webaudio/AudioNodeExternalInputStream.cpp
+++ b/dom/media/webaudio/AudioNodeExternalInputStream.cpp
@@ -10,38 +10,38 @@
 #include "AudioChannelFormat.h"
 #include "mozilla/dom/MediaStreamAudioSourceNode.h"
 
 using namespace mozilla::dom;
 
 namespace mozilla {
 
 AudioNodeExternalInputStream::AudioNodeExternalInputStream(
-  AudioNodeEngine* aEngine, TrackRate aSampleRate, AbstractThread* aMainThread)
-  : AudioNodeStream(aEngine, NO_STREAM_FLAGS, aSampleRate, aMainThread)
+  AudioNodeEngine* aEngine,
+  TrackRate aSampleRate)
+  : AudioNodeStream(aEngine, NO_STREAM_FLAGS, aSampleRate)
 {
   MOZ_COUNT_CTOR(AudioNodeExternalInputStream);
 }
 
 AudioNodeExternalInputStream::~AudioNodeExternalInputStream()
 {
   MOZ_COUNT_DTOR(AudioNodeExternalInputStream);
 }
 
 /* static */ already_AddRefed<AudioNodeExternalInputStream>
 AudioNodeExternalInputStream::Create(MediaStreamGraph* aGraph,
-                                     AudioNodeEngine* aEngine,
-                                     AbstractThread* aMainThread)
+                                     AudioNodeEngine* aEngine)
 {
   AudioContext* ctx = aEngine->NodeMainThread()->Context();
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(aGraph->GraphRate() == ctx->SampleRate());
 
   RefPtr<AudioNodeExternalInputStream> stream =
-    new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate(), aMainThread);
+    new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate());
   stream->mSuspendedCount += ctx->ShouldSuspendNewStream();
   aGraph->AddStream(stream);
   return stream.forget();
 }
 
 /**
  * Copies the data in aInput to aOffsetInBlock within aBlock.
  * aBlock must have been allocated with AllocateInputBlock and have a channel
--- a/dom/media/webaudio/AudioNodeExternalInputStream.h
+++ b/dom/media/webaudio/AudioNodeExternalInputStream.h
@@ -18,22 +18,22 @@ class AbstractThread;
  * This is a MediaStream implementation that acts for a Web Audio node but
  * unlike other AudioNodeStreams, supports any kind of MediaStream as an
  * input --- handling any number of audio tracks and handling blocking of
  * the input MediaStream.
  */
 class AudioNodeExternalInputStream final : public AudioNodeStream
 {
 public:
-  static already_AddRefed<AudioNodeExternalInputStream>
-  Create(MediaStreamGraph* aGraph, AudioNodeEngine* aEngine, AbstractThread* aMainThread);
+  static already_AddRefed<AudioNodeExternalInputStream> Create(
+    MediaStreamGraph* aGraph,
+    AudioNodeEngine* aEngine);
 
 protected:
-  AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate,
-                               AbstractThread* aMainThread);
+  AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate);
   ~AudioNodeExternalInputStream();
 
 public:
   void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
 
 private:
   /**
    * Determines if this is enabled or not.  Disabled nodes produce silence.
--- a/dom/media/webaudio/AudioNodeStream.cpp
+++ b/dom/media/webaudio/AudioNodeStream.cpp
@@ -24,27 +24,26 @@ namespace mozilla {
  * for regular audio contexts, and the rate requested by the web content
  * for offline audio contexts.
  * Each chunk in the track is a single block of WEBAUDIO_BLOCK_SIZE samples.
  * Note: This must be a different value than MEDIA_STREAM_DEST_TRACK_ID
  */
 
 AudioNodeStream::AudioNodeStream(AudioNodeEngine* aEngine,
                                  Flags aFlags,
-                                 TrackRate aSampleRate,
-                                 AbstractThread* aMainThread)
-  : ProcessedMediaStream(aMainThread),
-    mEngine(aEngine),
-    mSampleRate(aSampleRate),
-    mFlags(aFlags),
-    mNumberOfInputChannels(2),
-    mIsActive(aEngine->IsActive()),
-    mMarkAsFinishedAfterThisBlock(false),
-    mAudioParamStream(false),
-    mPassThrough(false)
+                                 TrackRate aSampleRate)
+  : ProcessedMediaStream()
+  , mEngine(aEngine)
+  , mSampleRate(aSampleRate)
+  , mFlags(aFlags)
+  , mNumberOfInputChannels(2)
+  , mIsActive(aEngine->IsActive())
+  , mMarkAsFinishedAfterThisBlock(false)
+  , mAudioParamStream(false)
+  , mPassThrough(false)
 {
   MOZ_ASSERT(NS_IsMainThread());
   mSuspendedCount = !(mIsActive || mFlags & EXTERNAL_OUTPUT);
   mChannelCountMode = ChannelCountMode::Max;
   mChannelInterpretation = ChannelInterpretation::Speakers;
   // AudioNodes are always producing data
   mHasCurrentData = true;
   mLastChunks.SetLength(std::max(uint16_t(1), mEngine->OutputCount()));
@@ -73,18 +72,17 @@ AudioNodeStream::Create(AudioContext* aC
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_RELEASE_ASSERT(aGraph);
 
   // MediaRecorders use an AudioNodeStream, but no AudioNode
   AudioNode* node = aEngine->NodeMainThread();
 
   RefPtr<AudioNodeStream> stream =
-    new AudioNodeStream(aEngine, aFlags, aGraph->GraphRate(),
-      aCtx->GetOwnerGlobal()->AbstractMainThreadFor(TaskCategory::Other));
+    new AudioNodeStream(aEngine, aFlags, aGraph->GraphRate());
   stream->mSuspendedCount += aCtx->ShouldSuspendNewStream();
   if (node) {
     stream->SetChannelMixingParametersImpl(node->ChannelCount(),
                                            node->ChannelCountModeValue(),
                                            node->ChannelInterpretationValue());
   }
   aGraph->AddStream(stream);
   return stream.forget();
--- a/dom/media/webaudio/AudioNodeStream.h
+++ b/dom/media/webaudio/AudioNodeStream.h
@@ -71,18 +71,17 @@ public:
          MediaStreamGraph* aGraph);
 
 protected:
   /**
    * Transfers ownership of aEngine to the new AudioNodeStream.
    */
   AudioNodeStream(AudioNodeEngine* aEngine,
                   Flags aFlags,
-                  TrackRate aSampleRate,
-                  AbstractThread* aMainThread);
+                  TrackRate aSampleRate);
 
   ~AudioNodeStream();
 
 public:
   // Control API
   /**
    * Sets a parameter that's a time relative to some stream's played time.
    * This time is converted to a time relative to this stream when it's set.
--- a/dom/media/webaudio/BiquadFilterNode.cpp
+++ b/dom/media/webaudio/BiquadFilterNode.cpp
@@ -154,34 +154,32 @@ public:
       }
       if (!hasTail) {
         if (!mBiquads.IsEmpty()) {
           mBiquads.Clear();
           aStream->ScheduleCheckForInactive();
 
           RefPtr<PlayingRefChangeHandler> refchanged =
             new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::RELEASE);
-          aStream->Graph()->
-            DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                       refchanged.forget());
+          aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+            refchanged.forget());
         }
 
         aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
         return;
       }
 
       PodArrayZero(inputBuffer);
 
     } else if(mBiquads.Length() != aInput.ChannelCount()){
       if (mBiquads.IsEmpty()) {
         RefPtr<PlayingRefChangeHandler> refchanged =
           new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::ADDREF);
-        aStream->Graph()->
-          DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                     refchanged.forget());
+        aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+          refchanged.forget());
       } else { // Help people diagnose bug 924718
         WebAudioUtils::LogToDeveloperConsole(mWindowID,
                                              "BiquadFilterChannelCountChangeWarning");
       }
 
       // Adjust the number of biquads based on the number of channels
       mBiquads.SetLength(aInput.ChannelCount());
     }
--- a/dom/media/webaudio/ConvolverNode.cpp
+++ b/dom/media/webaudio/ConvolverNode.cpp
@@ -120,19 +120,18 @@ public:
         input.AllocateChannels(1);
         WriteZeroesToAudioBlock(&input, 0, WEBAUDIO_BLOCK_SIZE);
       } else {
         if (mLeftOverData != INT32_MIN) {
           mLeftOverData = INT32_MIN;
           aStream->ScheduleCheckForInactive();
           RefPtr<PlayingRefChanged> refchanged =
             new PlayingRefChanged(aStream, PlayingRefChanged::RELEASE);
-          aStream->Graph()->
-            DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                       refchanged.forget());
+          aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+            refchanged.forget());
         }
         aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
         return;
       }
     } else {
       if (aInput.mVolume != 1.0f) {
         // Pre-multiply the input's volume
         uint32_t numChannels = aInput.ChannelCount();
@@ -142,19 +141,18 @@ public:
           float* dest = input.ChannelFloatsForWrite(i);
           AudioBlockCopyChannelWithScale(src, aInput.mVolume, dest);
         }
       }
 
       if (mLeftOverData <= 0) {
         RefPtr<PlayingRefChanged> refchanged =
           new PlayingRefChanged(aStream, PlayingRefChanged::ADDREF);
-        aStream->Graph()->
-          DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                     refchanged.forget());
+        aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+          refchanged.forget());
       }
       mLeftOverData = mBufferLength;
       MOZ_ASSERT(mLeftOverData > 0);
     }
     aOutput->AllocateChannels(2);
 
     mReverb->process(&input, aOutput);
   }
--- a/dom/media/webaudio/DelayNode.cpp
+++ b/dom/media/webaudio/DelayNode.cpp
@@ -77,36 +77,34 @@ public:
                     bool* aFinished) override
   {
     MOZ_ASSERT(aStream->SampleRate() == mDestination->SampleRate());
 
     if (!aInput.IsSilentOrSubnormal()) {
       if (mLeftOverData <= 0) {
         RefPtr<PlayingRefChanged> refchanged =
           new PlayingRefChanged(aStream, PlayingRefChanged::ADDREF);
-        aStream->Graph()->
-          DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                     refchanged.forget());
+        aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+          refchanged.forget());
       }
       mLeftOverData = mBuffer.MaxDelayTicks();
     } else if (mLeftOverData > 0) {
       mLeftOverData -= WEBAUDIO_BLOCK_SIZE;
     } else {
       if (mLeftOverData != INT32_MIN) {
         mLeftOverData = INT32_MIN;
         aStream->ScheduleCheckForInactive();
 
         // Delete our buffered data now we no longer need it
         mBuffer.Reset();
 
         RefPtr<PlayingRefChanged> refchanged =
           new PlayingRefChanged(aStream, PlayingRefChanged::RELEASE);
-        aStream->Graph()->
-          DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                     refchanged.forget());
+        aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+          refchanged.forget());
       }
       aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
       return;
     }
 
     mBuffer.Write(aInput);
 
     // Skip output update if mLastChunks has already been set by
--- a/dom/media/webaudio/IIRFilterNode.cpp
+++ b/dom/media/webaudio/IIRFilterNode.cpp
@@ -51,33 +51,31 @@ public:
         // all filter buffer values are zero, so the output will be zero
         // as well.
         if (allZero) {
           mIIRFilters.Clear();
           aStream->ScheduleCheckForInactive();
 
           RefPtr<PlayingRefChangeHandler> refchanged =
             new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::RELEASE);
-          aStream->Graph()->
-            DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                       refchanged.forget());
+          aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+            refchanged.forget());
 
           aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
           return;
         }
 
         PodZero(alignedInputBuffer, WEBAUDIO_BLOCK_SIZE);
       }
     } else if(mIIRFilters.Length() != aInput.ChannelCount()){
       if (mIIRFilters.IsEmpty()) {
         RefPtr<PlayingRefChangeHandler> refchanged =
           new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::ADDREF);
-        aStream->Graph()->
-          DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                     refchanged.forget());
+        aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+          refchanged.forget());
       } else {
         WebAudioUtils::LogToDeveloperConsole(mWindowID,
                                              "IIRFilterChannelCountChangeWarning");
       }
 
       // Adjust the number of filters based on the number of channels
       mIIRFilters.SetLength(aInput.ChannelCount());
       for (size_t i = 0; i < aInput.ChannelCount(); ++i) {
--- a/dom/media/webaudio/MediaStreamAudioSourceNode.cpp
+++ b/dom/media/webaudio/MediaStreamAudioSourceNode.cpp
@@ -79,19 +79,17 @@ MediaStreamAudioSourceNode::Init(DOMMedi
   MediaStreamGraph* graph = Context()->Graph();
   if (NS_WARN_IF(graph != inputStream->Graph())) {
     aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
     return;
   }
 
   mInputStream = aMediaStream;
   AudioNodeEngine* engine = new MediaStreamAudioSourceNodeEngine(this);
-  mStream =
-    AudioNodeExternalInputStream::Create(graph, engine,
-                                         aMediaStream->AbstractMainThread());
+  mStream = AudioNodeExternalInputStream::Create(graph, engine);
   mInputStream->AddConsumerToKeepAlive(static_cast<nsIDOMEventTarget*>(this));
 
   mInputStream->RegisterTrackListener(this);
   AttachToFirstTrack(mInputStream);
 }
 
 void
 MediaStreamAudioSourceNode::Destroy()
--- a/dom/media/webaudio/PannerNode.cpp
+++ b/dom/media/webaudio/PannerNode.cpp
@@ -203,30 +203,28 @@ public:
       } else {
         if (mLeftOverData != INT_MIN) {
           mLeftOverData = INT_MIN;
           aStream->ScheduleCheckForInactive();
           mHRTFPanner->reset();
 
           RefPtr<PlayingRefChangeHandler> refchanged =
             new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::RELEASE);
-          aStream->Graph()->
-          DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                     refchanged.forget());
+          aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+            refchanged.forget());
         }
         aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
         return;
       }
     } else if (mPanningModelFunction == &PannerNodeEngine::HRTFPanningFunction) {
       if (mLeftOverData == INT_MIN) {
         RefPtr<PlayingRefChangeHandler> refchanged =
           new PlayingRefChangeHandler(aStream, PlayingRefChangeHandler::ADDREF);
-        aStream->Graph()->
-          DispatchToMainThreadAfterStreamStateUpdate(mAbstractMainThread,
-                                                     refchanged.forget());
+        aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+          refchanged.forget());
       }
       mLeftOverData = mHRTFPanner->maxTailFrames();
     }
 
     StreamTime tick = mDestination->GraphTimeToStreamTime(aFrom);
     (this->*mPanningModelFunction)(aInput, aOutput, tick);
   }
 
--- a/dom/media/webspeech/synth/nsSpeechTask.cpp
+++ b/dom/media/webspeech/synth/nsSpeechTask.cpp
@@ -32,17 +32,16 @@ class SynthStreamListener : public Media
 {
 public:
   SynthStreamListener(nsSpeechTask* aSpeechTask,
                       MediaStream* aStream,
                       AbstractThread* aMainThread)
     : mSpeechTask(aSpeechTask)
     , mStream(aStream)
     , mStarted(false)
-    , mAbstractMainThread(aMainThread)
   {
   }
 
   void DoNotifyStarted()
   {
     if (mSpeechTask) {
       mSpeechTask->DispatchStartInner();
     }
@@ -60,24 +59,22 @@ public:
                    MediaStreamGraphEvent event) override
   {
     switch (event) {
       case MediaStreamGraphEvent::EVENT_FINISHED:
         {
           if (!mStarted) {
             mStarted = true;
             aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-              mAbstractMainThread,
               NewRunnableMethod("dom::SynthStreamListener::DoNotifyStarted",
                                 this,
                                 &SynthStreamListener::DoNotifyStarted));
           }
 
           aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-            mAbstractMainThread,
             NewRunnableMethod("dom::SynthStreamListener::DoNotifyFinished",
                               this,
                               &SynthStreamListener::DoNotifyFinished));
         }
         break;
       case MediaStreamGraphEvent::EVENT_REMOVED:
         mSpeechTask = nullptr;
         // Dereference MediaStream to destroy safety
@@ -88,33 +85,30 @@ public:
     }
   }
 
   void NotifyBlockingChanged(MediaStreamGraph* aGraph, Blocking aBlocked) override
   {
     if (aBlocked == MediaStreamListener::UNBLOCKED && !mStarted) {
       mStarted = true;
       aGraph->DispatchToMainThreadAfterStreamStateUpdate(
-        mAbstractMainThread,
         NewRunnableMethod("dom::SynthStreamListener::DoNotifyStarted",
                           this,
                           &SynthStreamListener::DoNotifyStarted));
     }
   }
 
 private:
   // Raw pointer; if we exist, the stream exists,
   // and 'mSpeechTask' exclusively owns it and therefor exists as well.
   nsSpeechTask* mSpeechTask;
   // This is KungFuDeathGrip for MediaStream
   RefPtr<MediaStream> mStream;
 
   bool mStarted;
-
-  const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 // nsSpeechTask
 
 NS_IMPL_CYCLE_COLLECTION(nsSpeechTask, mSpeechSynthesis, mUtterance, mCallback);
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(nsSpeechTask)
   NS_INTERFACE_MAP_ENTRY(nsISpeechTask)
@@ -171,17 +165,17 @@ nsSpeechTask::~nsSpeechTask()
 
 void
 nsSpeechTask::InitDirectAudio()
 {
   // nullptr as final argument here means that this is not tied to a window.
   // This is a global MSG.
   mStream = MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
                                           AudioChannel::Normal, nullptr)->
-    CreateSourceStream(AbstractThread::MainThread() /* Non DocGroup-version for the task in parent. */);
+    CreateSourceStream();
   mIndirectAudio = false;
   mInited = true;
 }
 
 void
 nsSpeechTask::InitIndirectAudio()
 {
   mIndirectAudio = true;
--- a/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
+++ b/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
@@ -38,17 +38,17 @@ MtransportTestUtils *test_utils;
 
 namespace {
 
 class FakeSourceMediaStream : public mozilla::SourceMediaStream {
 
 public:
 
   FakeSourceMediaStream()
-    : SourceMediaStream(nullptr)
+    : SourceMediaStream()
   {
   }
 
   virtual ~FakeSourceMediaStream() override
   {
     mMainThreadDestroyed = true;
   }