Bug 1184634 - Rename "TaskQueue()" accessor to "OwnerThread()". r=gerald
author Bobby Holley <bobbyholley@gmail.com>
Thu, 16 Jul 2015 11:31:21 -0700
changeset 253451 e522f33ca3e630353d9480e7efe52d533ed5977e
parent 253450 dc714b35dd0419b9ac6951217562aeffa921157a
child 253452 9f3cad5f3d77fc1a149a16d9d311ee6914013dbb
push id 29065
push user ryanvm@gmail.com
push date Fri, 17 Jul 2015 14:26:32 +0000
treeherder mozilla-central@911935404233
reviewers gerald
bugs 1184634
milestone 42.0a1
Bug 1184634 - Rename "TaskQueue()" accessor to "OwnerThread()". r=gerald Otherwise this name will collide with the rename of MediaTaskQueue to TaskQueue. It's also a better naming convention, because it generalizes to things that are owned by an AbstractThread that is not a Task Queue. We rename to Queue() in TestMozPromise, because that's more accurate.
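
For context, a minimal standalone C++ sketch of why the accessor rename matters. The names below (OwningObject, the bare Dispatch() signature, and std::shared_ptr standing in for nsRefPtr) are illustrative simplifications rather than the real Mozilla types: once MediaTaskQueue becomes TaskQueue, a member accessor also named TaskQueue() hides that type name inside its class, while OwnerThread() avoids the collision and still reads correctly when the owner is some AbstractThread that is not a task queue.

    #include <memory>

    // Stand-in for mozilla::AbstractThread; the real Dispatch() takes an
    // already_AddRefed<nsIRunnable>, elided here to keep the sketch self-contained.
    class AbstractThread {
    public:
      virtual ~AbstractThread() = default;
      virtual void Dispatch() {}
    };

    // The post-rename type name that a TaskQueue() accessor would collide with.
    class TaskQueue : public AbstractThread {};

    class OwningObject {
    public:
      explicit OwningObject(std::shared_ptr<TaskQueue> aQueue)
        : mTaskQueue(std::move(aQueue)) {}

      // Previously spelled TaskQueue(); inside this class that name would hide
      // the TaskQueue type above. OwnerThread() sidesteps the clash and stays
      // accurate for owners that are AbstractThreads but not task queues.
      AbstractThread* OwnerThread() const { return mTaskQueue.get(); }

    private:
      std::shared_ptr<TaskQueue> mTaskQueue;
    };

    int main() {
      OwningObject obj(std::make_shared<TaskQueue>());
      obj.OwnerThread()->Dispatch();  // callers read "dispatch to the owner thread"
      return 0;
    }
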
dom/media/MediaDecoder.cpp
dom/media/MediaDecoderReader.cpp
dom/media/MediaDecoderReader.h
dom/media/MediaDecoderStateMachine.cpp
dom/media/MediaDecoderStateMachine.h
dom/media/MediaFormatReader.cpp
dom/media/android/AndroidMediaReader.cpp
dom/media/fmp4/MP4Reader.cpp
dom/media/gtest/TestMP4Reader.cpp
dom/media/gtest/TestMozPromise.cpp
dom/media/mediasource/MediaSourceReader.cpp
dom/media/mediasource/TrackBuffer.cpp
dom/media/omx/MediaCodecReader.cpp
dom/media/omx/MediaOmxCommonDecoder.cpp
dom/media/omx/MediaOmxReader.cpp
dom/media/raw/RawReader.cpp
dom/media/webaudio/MediaBufferDecoder.cpp
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -230,33 +230,33 @@ void MediaDecoder::UpdateDormantState(bo
   if (mIsDormant) {
     DECODER_LOG("UpdateDormantState() entering DORMANT state");
     // enter dormant state
     RefPtr<nsRunnable> event =
       NS_NewRunnableMethodWithArg<bool>(
         mDecoderStateMachine,
         &MediaDecoderStateMachine::SetDormant,
         true);
-    mDecoderStateMachine->TaskQueue()->Dispatch(event.forget());
+    mDecoderStateMachine->OwnerThread()->Dispatch(event.forget());
 
     if (IsEnded()) {
       mWasEndedWhenEnteredDormant = true;
     }
     mNextState = mPlayState;
     ChangeState(PLAY_STATE_LOADING);
   } else {
     DECODER_LOG("UpdateDormantState() leaving DORMANT state");
     // exit dormant state
     // trigger to state machine.
     RefPtr<nsRunnable> event =
       NS_NewRunnableMethodWithArg<bool>(
         mDecoderStateMachine,
         &MediaDecoderStateMachine::SetDormant,
         false);
-    mDecoderStateMachine->TaskQueue()->Dispatch(event.forget());
+    mDecoderStateMachine->OwnerThread()->Dispatch(event.forget());
   }
 }
 
 void MediaDecoder::DormantTimerExpired(nsITimer* aTimer, void* aClosure)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(aClosure);
   MediaDecoder* decoder = static_cast<MediaDecoder*>(aClosure);
@@ -618,17 +618,17 @@ nsresult MediaDecoder::Seek(double aTime
   }
   return NS_OK;
 }
 
 void MediaDecoder::CallSeek(const SeekTarget& aTarget)
 {
   MOZ_ASSERT(NS_IsMainThread());
   mSeekRequest.DisconnectIfExists();
-  mSeekRequest.Begin(ProxyMediaCall(mDecoderStateMachine->TaskQueue(),
+  mSeekRequest.Begin(ProxyMediaCall(mDecoderStateMachine->OwnerThread(),
                                     mDecoderStateMachine.get(), __func__,
                                     &MediaDecoderStateMachine::Seek, aTarget)
     ->Then(AbstractThread::MainThread(), __func__, this,
            &MediaDecoder::OnSeekResolved, &MediaDecoder::OnSeekRejected));
 }
 
 double MediaDecoder::GetCurrentTime()
 {
@@ -1371,17 +1371,17 @@ MediaDecoderStateMachine* MediaDecoder::
 
 void
 MediaDecoder::NotifyWaitingForResourcesStatusChanged()
 {
   if (mDecoderStateMachine) {
     RefPtr<nsRunnable> task =
       NS_NewRunnableMethod(mDecoderStateMachine,
                            &MediaDecoderStateMachine::NotifyWaitingForResourcesStatusChanged);
-    mDecoderStateMachine->TaskQueue()->Dispatch(task.forget());
+    mDecoderStateMachine->OwnerThread()->Dispatch(task.forget());
   }
 }
 
 bool MediaDecoder::IsShutdown() const {
   MOZ_ASSERT(NS_IsMainThread());
   NS_ENSURE_TRUE(GetStateMachine(), true);
   return mStateMachineIsShutdown;
 }
--- a/dom/media/MediaDecoderReader.cpp
+++ b/dom/media/MediaDecoderReader.cpp
@@ -161,17 +161,17 @@ MediaDecoderReader::DecodeToFirstVideoDa
     if (!self->DecodeVideoFrame(skip, 0)) {
       self->VideoQueue().Finish();
       return !!self->VideoQueue().PeekFront();
     }
     return true;
   }, [self] () -> bool {
     MOZ_ASSERT(self->OnTaskQueue());
     return self->VideoQueue().GetSize();
-  })->Then(TaskQueue(), __func__, [self, p] () {
+  })->Then(OwnerThread(), __func__, [self, p] () {
     p->Resolve(self->VideoQueue().PeekFront(), __func__);
   }, [p] () {
     // We don't have a way to differentiate EOS, error, and shutdown here. :-(
     p->Reject(END_OF_STREAM, __func__);
   });
 
   return p.forget();
 }
@@ -202,17 +202,17 @@ MediaDecoderReader::ThrottledNotifyDataA
   // If it's been long enough since our last update, do it.
   if (TimeStamp::Now() - mLastThrottledNotify > mThrottleDuration) {
     DoThrottledNotify();
   } else if (!mThrottledNotify.Exists()) {
     // Otherwise, schedule an update if one isn't scheduled already.
     nsRefPtr<MediaDecoderReader> self = this;
     mThrottledNotify.Begin(
       mTimer->WaitUntil(mLastThrottledNotify + mThrottleDuration, __func__)
-      ->Then(TaskQueue(), __func__,
+      ->Then(OwnerThread(), __func__,
              [self] () -> void {
                self->mThrottledNotify.Complete();
                NS_ENSURE_TRUE_VOID(!self->mShutdown);
                self->DoThrottledNotify();
              },
              [self] () -> void {
                self->mThrottledNotify.Complete();
                NS_WARNING("throttle callback rejected");
--- a/dom/media/MediaDecoderReader.h
+++ b/dom/media/MediaDecoderReader.h
@@ -110,17 +110,17 @@ public:
   // Destroys the decoding state. The reader cannot be made usable again.
   // This is different from ReleaseMediaResources() as it is irreversable,
   // whereas ReleaseMediaResources() is.  Must be called on the decode
   // thread.
   virtual nsRefPtr<ShutdownPromise> Shutdown();
 
   virtual bool OnTaskQueue()
   {
-    return TaskQueue()->IsCurrentThreadIn();
+    return OwnerThread()->IsCurrentThreadIn();
   }
 
   // Resets all state related to decoding, emptying all buffers etc.
   // Cancels all pending Request*Data() request callbacks, rejects any
   // outstanding seek promises, and flushes the decode pipeline. The
   // decoder must not call any of the callbacks for outstanding
   // Request*Data() calls after this is called. Calls to Request*Data()
   // made after this should be processed as usual.
@@ -268,17 +268,17 @@ public:
   // updates to be observable immediately, and is generally less
   // trigger-happy with notifications anyway.
   void DispatchNotifyDataArrived(uint32_t aLength, int64_t aOffset, bool aThrottleUpdates)
   {
     RefPtr<nsRunnable> r =
       NS_NewRunnableMethodWithArg<media::Interval<int64_t>>(this, aThrottleUpdates ? &MediaDecoderReader::ThrottledNotifyDataArrived
                                                                                    : &MediaDecoderReader::NotifyDataArrived,
                                                             media::Interval<int64_t>(aOffset, aOffset + aLength));
-    TaskQueue()->Dispatch(r.forget(), AbstractThread::DontAssertDispatchSuccess);
+    OwnerThread()->Dispatch(r.forget(), AbstractThread::DontAssertDispatchSuccess);
   }
 
   // Notify the reader that data from the resource was evicted (MediaSource only)
   virtual void NotifyDataRemoved() {}
   virtual int64_t GetEvictionOffset(double aTime) { return -1; }
 
   virtual MediaQueue<AudioData>& AudioQueue() { return mAudioQueue; }
   virtual MediaQueue<VideoData>& VideoQueue() { return mVideoQueue; }
@@ -302,20 +302,20 @@ public:
     nsCOMPtr<nsIRunnable> r =
       NS_NewRunnableFunction([self, aStartTime] () -> void
     {
       MOZ_ASSERT(self->OnTaskQueue());
       MOZ_ASSERT(!self->HaveStartTime());
       self->mStartTime.emplace(aStartTime);
       self->UpdateBuffered();
     });
-    TaskQueue()->Dispatch(r.forget());
+    OwnerThread()->Dispatch(r.forget());
   }
 
-  MediaTaskQueue* TaskQueue() {
+  MediaTaskQueue* OwnerThread() {
     return mTaskQueue;
   }
 
   // Returns true if the reader implements RequestAudioData()
   // and RequestVideoData() asynchronously, rather than using the
   // implementation in this class to adapt the old synchronous to
   // the newer async model.
   virtual bool IsAsync() const { return false; }
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -729,17 +729,17 @@ MediaDecoderStateMachine::OnNotDecoded(M
   // If the decoder is waiting for data, we tell it to call us back when the
   // data arrives.
   if (aReason == MediaDecoderReader::WAITING_FOR_DATA) {
     MOZ_ASSERT(mReader->IsWaitForDataSupported(),
                "Readers that send WAITING_FOR_DATA need to implement WaitForData");
     nsRefPtr<MediaDecoderStateMachine> self = this;
     WaitRequestRef(aType).Begin(ProxyMediaCall(DecodeTaskQueue(), mReader.get(), __func__,
                                                &MediaDecoderReader::WaitForData, aType)
-      ->Then(TaskQueue(), __func__,
+      ->Then(OwnerThread(), __func__,
              [self] (MediaData::Type aType) -> void {
                ReentrantMonitorAutoEnter mon(self->mDecoder->GetReentrantMonitor());
                self->WaitRequestRef(aType).Complete();
                self->DispatchDecodeTasksIfNeeded();
              },
              [self] (WaitForDataRejectValue aRejection) -> void {
                ReentrantMonitorAutoEnter mon(self->mDecoder->GetReentrantMonitor());
                self->WaitRequestRef(aRejection.mType).Complete();
@@ -1299,17 +1299,17 @@ void MediaDecoderStateMachine::Shutdown(
   // Shut down our start time rendezvous.
   if (mStartTimeRendezvous) {
     mStartTimeRendezvous->Destroy();
   }
 
   // Put a task in the decode queue to shutdown the reader.
   // the queue to spin down.
   ProxyMediaCall(DecodeTaskQueue(), mReader.get(), __func__, &MediaDecoderReader::Shutdown)
-    ->Then(TaskQueue(), __func__, this,
+    ->Then(OwnerThread(), __func__, this,
            &MediaDecoderStateMachine::FinishShutdown,
            &MediaDecoderStateMachine::FinishShutdown);
   DECODER_LOG("Shutdown started");
 }
 
 void MediaDecoderStateMachine::StartDecoding()
 {
   MOZ_ASSERT(OnTaskQueue());
@@ -1486,17 +1486,17 @@ nsresult
 MediaDecoderStateMachine::EnqueueDecodeFirstFrameTask()
 {
   MOZ_ASSERT(OnTaskQueue());
   AssertCurrentThreadInMonitor();
   MOZ_ASSERT(mState == DECODER_STATE_DECODING_FIRSTFRAME);
 
   nsCOMPtr<nsIRunnable> task(
     NS_NewRunnableMethod(this, &MediaDecoderStateMachine::CallDecodeFirstFrame));
-  TaskQueue()->Dispatch(task.forget());
+  OwnerThread()->Dispatch(task.forget());
   return NS_OK;
 }
 
 void
 MediaDecoderStateMachine::DispatchDecodeTasksIfNeeded()
 {
   MOZ_ASSERT(OnTaskQueue());
   AssertCurrentThreadInMonitor();
@@ -1605,17 +1605,17 @@ MediaDecoderStateMachine::InitiateSeek()
   // Reset our state machine and decoding pipeline before seeking.
   Reset();
 
   // Do the seek.
   nsRefPtr<MediaDecoderStateMachine> self = this;
   mSeekRequest.Begin(ProxyMediaCall(DecodeTaskQueue(), mReader.get(), __func__,
                                     &MediaDecoderReader::Seek, mCurrentSeek.mTarget.mTime,
                                     Duration().ToMicroseconds())
-    ->Then(TaskQueue(), __func__,
+    ->Then(OwnerThread(), __func__,
            [self] (int64_t) -> void {
              ReentrantMonitorAutoEnter mon(self->mDecoder->GetReentrantMonitor());
              self->mSeekRequest.Complete();
              // We must decode the first samples of active streams, so we can determine
              // the new stream time. So dispatch tasks to do that.
              self->mDecodeToSeekTarget = true;
              self->DispatchDecodeTasksIfNeeded();
            }, [self] (nsresult aResult) -> void {
@@ -1664,17 +1664,17 @@ MediaDecoderStateMachine::EnsureAudioDec
     return NS_OK;
   }
 
   SAMPLE_LOG("Queueing audio task - queued=%i, decoder-queued=%o",
              AudioQueue().GetSize(), mReader->SizeOfAudioQueueInFrames());
 
   mAudioDataRequest.Begin(ProxyMediaCall(DecodeTaskQueue(), mReader.get(),
                                          __func__, &MediaDecoderReader::RequestAudioData)
-    ->Then(TaskQueue(), __func__, this,
+    ->Then(OwnerThread(), __func__, this,
            &MediaDecoderStateMachine::OnAudioDecoded,
            &MediaDecoderStateMachine::OnAudioNotDecoded));
 
   return NS_OK;
 }
 
 nsresult
 MediaDecoderStateMachine::DispatchVideoDecodeTaskIfNeeded()
@@ -1725,17 +1725,17 @@ MediaDecoderStateMachine::EnsureVideoDec
 
   SAMPLE_LOG("Queueing video task - queued=%i, decoder-queued=%o, skip=%i, time=%lld",
              VideoQueue().GetSize(), mReader->SizeOfVideoQueueInFrames(), skipToNextKeyFrame,
              currentTime);
 
   mVideoDataRequest.Begin(ProxyMediaCall(DecodeTaskQueue(), mReader.get(), __func__,
                                          &MediaDecoderReader::RequestVideoData,
                                          skipToNextKeyFrame, currentTime, forceDecodeAhead)
-    ->Then(TaskQueue(), __func__, this,
+    ->Then(OwnerThread(), __func__, this,
            &MediaDecoderStateMachine::OnVideoDecoded,
            &MediaDecoderStateMachine::OnVideoNotDecoded));
   return NS_OK;
 }
 
 nsresult
 MediaDecoderStateMachine::StartAudioThread()
 {
@@ -1886,32 +1886,32 @@ MediaDecoderStateMachine::OnMetadataRead
   mDecoder->SetMediaSeekable(mReader->IsMediaSeekable());
   mInfo = aMetadata->mInfo;
   mMetadataTags = aMetadata->mTags.forget();
   nsRefPtr<MediaDecoderStateMachine> self = this;
 
   // Set up the start time rendezvous if it doesn't already exist (which is
   // generally the case, unless we're coming out of dormant mode).
   if (!mStartTimeRendezvous) {
-    mStartTimeRendezvous = new StartTimeRendezvous(TaskQueue(), HasAudio(), HasVideo(),
+    mStartTimeRendezvous = new StartTimeRendezvous(OwnerThread(), HasAudio(), HasVideo(),
                                                    mReader->ForceZeroStartTime() || IsRealTime());
 
-    mStartTimeRendezvous->AwaitStartTime()->Then(TaskQueue(), __func__,
+    mStartTimeRendezvous->AwaitStartTime()->Then(OwnerThread(), __func__,
       [self] () -> void {
         NS_ENSURE_TRUE_VOID(!self->IsShutdown());
         self->mReader->DispatchSetStartTime(self->StartTime());
       },
       [] () -> void { NS_WARNING("Setting start time on reader failed"); }
     );
   }
 
   if (mInfo.mMetadataDuration.isSome()) {
     RecomputeDuration();
   } else if (mInfo.mUnadjustedMetadataEndTime.isSome()) {
-    mStartTimeRendezvous->AwaitStartTime()->Then(TaskQueue(), __func__,
+    mStartTimeRendezvous->AwaitStartTime()->Then(OwnerThread(), __func__,
       [self] () -> void {
         NS_ENSURE_TRUE_VOID(!self->IsShutdown());
         TimeUnit unadjusted = self->mInfo.mUnadjustedMetadataEndTime.ref();
         TimeUnit adjustment = TimeUnit::FromMicroseconds(self->StartTime());
         self->mInfo.mMetadataDuration.emplace(unadjusted - adjustment);
         self->RecomputeDuration();
       }, [] () -> void { NS_WARNING("Adjusting metadata end time failed"); }
     );
@@ -2027,35 +2027,35 @@ MediaDecoderStateMachine::DecodeFirstFra
     // we have the start time from last time we loaded.
     nsresult res = FinishDecodeFirstFrame();
     NS_ENSURE_SUCCESS(res, res);
   } else {
     if (HasAudio()) {
       mAudioDataRequest.Begin(
         ProxyMediaCall(DecodeTaskQueue(), mReader.get(), __func__,
                        &MediaDecoderReader::RequestAudioData)
-        ->Then(TaskQueue(), __func__, mStartTimeRendezvous.get(),
+        ->Then(OwnerThread(), __func__, mStartTimeRendezvous.get(),
                &StartTimeRendezvous::ProcessFirstSample<AudioDataPromise>,
                &StartTimeRendezvous::FirstSampleRejected<AudioData>)
         ->CompletionPromise()
-        ->Then(TaskQueue(), __func__, this,
+        ->Then(OwnerThread(), __func__, this,
                &MediaDecoderStateMachine::OnAudioDecoded,
                &MediaDecoderStateMachine::OnAudioNotDecoded)
       );
     }
     if (HasVideo()) {
       mVideoDecodeStartTime = TimeStamp::Now();
       mVideoDataRequest.Begin(
         ProxyMediaCall(DecodeTaskQueue(), mReader.get(), __func__,
                        &MediaDecoderReader::RequestVideoData, false, int64_t(0), false)
-        ->Then(TaskQueue(), __func__, mStartTimeRendezvous.get(),
+        ->Then(OwnerThread(), __func__, mStartTimeRendezvous.get(),
                &StartTimeRendezvous::ProcessFirstSample<VideoDataPromise>,
                &StartTimeRendezvous::FirstSampleRejected<VideoData>)
         ->CompletionPromise()
-        ->Then(TaskQueue(), __func__, this,
+        ->Then(OwnerThread(), __func__, this,
                &MediaDecoderStateMachine::OnVideoDecoded,
                &MediaDecoderStateMachine::OnVideoNotDecoded));
     }
   }
 
   return NS_OK;
 }
 
@@ -2260,20 +2260,20 @@ MediaDecoderStateMachine::FinishShutdown
   // event runner running this function (which holds a reference to the
   // state machine) needs to finish and be released in order to allow
   // that. So we dispatch an event to run after this event runner has
   // finished and released its monitor/references. That event then will
   // dispatch an event to the main thread to release the decoder and
   // state machine.
   DECODER_LOG("Shutting down state machine task queue");
   RefPtr<DecoderDisposer> disposer = new DecoderDisposer(mDecoder, this);
-  TaskQueue()->BeginShutdown()->Then(AbstractThread::MainThread(), __func__,
-                                     disposer.get(),
-                                     &DecoderDisposer::OnTaskQueueShutdown,
-                                     &DecoderDisposer::OnTaskQueueShutdown);
+  OwnerThread()->BeginShutdown()->Then(AbstractThread::MainThread(), __func__,
+                                       disposer.get(),
+                                       &DecoderDisposer::OnTaskQueueShutdown,
+                                       &DecoderDisposer::OnTaskQueueShutdown);
 }
 
 nsresult MediaDecoderStateMachine::RunStateMachine()
 {
   MOZ_ASSERT(OnTaskQueue());
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
 
   mDelayedScheduler.Reset(); // Must happen on state machine task queue.
@@ -2296,17 +2296,17 @@ nsresult MediaDecoderStateMachine::RunSt
       return NS_OK;
     }
 
     case DECODER_STATE_DECODING_METADATA: {
       if (!mMetadataRequest.Exists()) {
         DECODER_LOG("Dispatching AsyncReadMetadata");
         mMetadataRequest.Begin(ProxyMediaCall(DecodeTaskQueue(), mReader.get(), __func__,
                                               &MediaDecoderReader::AsyncReadMetadata)
-          ->Then(TaskQueue(), __func__, this,
+          ->Then(OwnerThread(), __func__, this,
                  &MediaDecoderStateMachine::OnMetadataRead,
                  &MediaDecoderStateMachine::OnMetadataNotRead));
 
       }
       return NS_OK;
     }
 
     case DECODER_STATE_DECODING_FIRSTFRAME: {
@@ -2960,17 +2960,17 @@ MediaDecoderStateMachine::ScheduleStateM
   AssertCurrentThreadInMonitor();
   if (mDispatchedStateMachine) {
     return;
   }
   mDispatchedStateMachine = true;
 
   nsCOMPtr<nsIRunnable> task =
     NS_NewRunnableMethod(this, &MediaDecoderStateMachine::RunStateMachine);
-  TaskQueue()->Dispatch(task.forget());
+  OwnerThread()->Dispatch(task.forget());
 }
 
 void
 MediaDecoderStateMachine::ScheduleStateMachineIn(int64_t aMicroseconds)
 {
   AssertCurrentThreadInMonitor();
   MOZ_ASSERT(OnTaskQueue());          // mDelayedScheduler.Ensure() may Disconnect()
                                       // the promise, which must happen on the state
@@ -2994,17 +2994,17 @@ MediaDecoderStateMachine::ScheduleStateM
 
 bool MediaDecoderStateMachine::OnDecodeTaskQueue() const
 {
   return !DecodeTaskQueue() || DecodeTaskQueue()->IsCurrentThreadIn();
 }
 
 bool MediaDecoderStateMachine::OnTaskQueue() const
 {
-  return TaskQueue()->IsCurrentThreadIn();
+  return OwnerThread()->IsCurrentThreadIn();
 }
 
 bool MediaDecoderStateMachine::IsStateMachineScheduled() const
 {
   MOZ_ASSERT(OnTaskQueue());
   return mDispatchedStateMachine || mDelayedScheduler.IsScheduled();
 }
 
@@ -3149,17 +3149,17 @@ void MediaDecoderStateMachine::DispatchA
       // Start DecodedStream if we are already playing. Otherwise it will be
       // handled in MaybeStartPlayback().
       if (self->IsPlaying()) {
         self->mDecodedStream->StartPlayback(self->GetMediaTime(), self->mInfo);
       }
       self->ScheduleStateMachine();
     }
   });
-  TaskQueue()->Dispatch(r.forget());
+  OwnerThread()->Dispatch(r.forget());
 }
 
 void MediaDecoderStateMachine::AddOutputStream(ProcessedMediaStream* aStream,
                                                bool aFinishWhenEnded)
 {
   MOZ_ASSERT(NS_IsMainThread());
   DECODER_LOG("AddOutputStream aStream=%p!", aStream);
   mDecodedStream->Connect(aStream, aFinishWhenEnded);
--- a/dom/media/MediaDecoderStateMachine.h
+++ b/dom/media/MediaDecoderStateMachine.h
@@ -163,17 +163,17 @@ private:
 
   void Shutdown();
 public:
 
   void DispatchShutdown()
   {
     nsCOMPtr<nsIRunnable> runnable =
       NS_NewRunnableMethod(this, &MediaDecoderStateMachine::Shutdown);
-    TaskQueue()->Dispatch(runnable.forget());
+    OwnerThread()->Dispatch(runnable.forget());
   }
 
   void FinishShutdown();
 
   // Immutable after construction - may be called on any thread.
   bool IsRealTime() const { return mRealTime; }
 
   // Functions used by assertions to ensure we're calling things
@@ -203,17 +203,17 @@ private:
   // the state machine thread.
   void StartBuffering();
 public:
 
   void DispatchStartBuffering()
   {
     nsCOMPtr<nsIRunnable> runnable =
       NS_NewRunnableMethod(this, &MediaDecoderStateMachine::StartBuffering);
-    TaskQueue()->Dispatch(runnable.forget());
+    OwnerThread()->Dispatch(runnable.forget());
   }
 
   // This is called on the state machine thread and audio thread.
   // The decoder monitor must be obtained before calling this.
   bool HasAudio() const {
     MOZ_ASSERT(OnTaskQueue());
     AssertCurrentThreadInMonitor();
     return mInfo.HasAudio();
@@ -259,33 +259,33 @@ public:
   }
 
   void DispatchNotifyDataArrived(uint32_t aLength, int64_t aOffset, bool aThrottleUpdates)
   {
     mReader->DispatchNotifyDataArrived(aLength, aOffset, aThrottleUpdates);
   }
 
   // Returns the state machine task queue.
-  MediaTaskQueue* TaskQueue() const { return mTaskQueue; }
+  MediaTaskQueue* OwnerThread() const { return mTaskQueue; }
 
   // Calls ScheduleStateMachine() after taking the decoder lock. Also
   // notifies the decoder thread in case it's waiting on the decoder lock.
   void ScheduleStateMachineWithLockAndWakeDecoder();
 
   // Schedules the shared state machine thread to run the state machine.
   //
   // The first variant coalesces multiple calls into a single state machine
   // cycle, the second variant does not. The second variant must be used when
   // not already on the state machine task queue.
   void ScheduleStateMachine();
   void ScheduleStateMachineCrossThread()
   {
     nsCOMPtr<nsIRunnable> task =
       NS_NewRunnableMethod(this, &MediaDecoderStateMachine::RunStateMachine);
-    TaskQueue()->Dispatch(task.forget());
+    OwnerThread()->Dispatch(task.forget());
   }
 
   // Invokes ScheduleStateMachine to run in |aMicroseconds| microseconds,
   // unless it's already scheduled to run earlier, in which case the
   // request is discarded.
   void ScheduleStateMachineIn(int64_t aMicroseconds);
 
   void OnDelayedSchedule()
@@ -300,17 +300,17 @@ public:
 
   // Set the media fragment end time. aEndTime is in microseconds.
   void DispatchSetFragmentEndTime(int64_t aEndTime)
   {
     nsRefPtr<MediaDecoderStateMachine> self = this;
     nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([self, aEndTime] () {
       self->mFragmentEndTime = aEndTime;
     });
-    TaskQueue()->Dispatch(r.forget());
+    OwnerThread()->Dispatch(r.forget());
   }
 
   // Drop reference to decoder.  Only called during shutdown dance.
   void BreakCycles() {
     MOZ_ASSERT(NS_IsMainThread());
     if (mReader) {
       mReader->BreakCycles();
     }
@@ -354,17 +354,17 @@ public:
       MOZ_ASSERT(self->OnTaskQueue());
       ReentrantMonitorAutoEnter mon(self->mDecoder->GetReentrantMonitor());
       self->mMinimizePreroll = true;
 
       // Make sure that this arrives before playback starts, otherwise this won't
       // have the intended effect.
       MOZ_DIAGNOSTIC_ASSERT(self->mPlayState == MediaDecoder::PLAY_STATE_LOADING);
     });
-    TaskQueue()->Dispatch(r.forget());
+    OwnerThread()->Dispatch(r.forget());
   }
 
   void OnAudioDecoded(AudioData* aSample);
   void OnVideoDecoded(VideoData* aSample);
   void OnNotDecoded(MediaData::Type aType, MediaDecoderReader::NotDecodedReason aReason);
   void OnAudioNotDecoded(MediaDecoderReader::NotDecodedReason aReason)
   {
     MOZ_ASSERT(OnTaskQueue());
@@ -662,40 +662,40 @@ protected:
 private:
   // Update mDecoder's playback offset.
   void OnPlaybackOffsetUpdate(int64_t aPlaybackOffset);
 public:
   void DispatchOnPlaybackOffsetUpdate(int64_t aPlaybackOffset)
   {
     RefPtr<nsRunnable> r =
       NS_NewRunnableMethodWithArg<int64_t>(this, &MediaDecoderStateMachine::OnPlaybackOffsetUpdate, aPlaybackOffset);
-    TaskQueue()->Dispatch(r.forget());
+    OwnerThread()->Dispatch(r.forget());
   }
 
 private:
   // Called by the AudioSink to signal that all outstanding work is complete
   // and the sink is shutting down.
   void OnAudioSinkComplete();
 public:
   void DispatchOnAudioSinkComplete()
   {
     nsCOMPtr<nsIRunnable> runnable =
       NS_NewRunnableMethod(this, &MediaDecoderStateMachine::OnAudioSinkComplete);
-    TaskQueue()->Dispatch(runnable.forget());
+    OwnerThread()->Dispatch(runnable.forget());
   }
 private:
 
   // Called by the AudioSink to signal errors.
   void OnAudioSinkError();
 
   void DispatchOnAudioSinkError()
   {
     nsCOMPtr<nsIRunnable> runnable =
       NS_NewRunnableMethod(this, &MediaDecoderStateMachine::OnAudioSinkError);
-    TaskQueue()->Dispatch(runnable.forget());
+    OwnerThread()->Dispatch(runnable.forget());
   }
 
   // Return true if the video decoder's decode speed can not catch up the
   // play time.
   bool NeedToSkipToNextKeyframe();
 
   // The decoder object that created this state machine. The state machine
   // holds a strong reference to the decoder to ensure that the decoder stays
@@ -746,17 +746,17 @@ private:
     {
       MOZ_ASSERT(mSelf->OnTaskQueue());
       if (IsScheduled() && mTarget <= aTarget) {
         return;
       }
       Reset();
       mTarget = aTarget;
       mRequest.Begin(mMediaTimer->WaitUntil(mTarget, __func__)->Then(
-        mSelf->TaskQueue(), __func__, mSelf,
+        mSelf->OwnerThread(), __func__, mSelf,
         &MediaDecoderStateMachine::OnDelayedSchedule,
         &MediaDecoderStateMachine::NotReached));
     }
 
     void CompleteRequest()
     {
       MOZ_ASSERT(mSelf->OnTaskQueue());
       mRequest.Complete();
@@ -915,17 +915,17 @@ private:
   // NotifyAll on the monitor must be called when the state is changed so
   // that interested threads can wake up and alter behaviour if appropriate
   // Accessed on state machine, audio, main, and AV thread.
   Watchable<State> mState;
 
   // The task queue in which we run decode tasks. This is referred to as
   // the "decode thread", though in practise tasks can run on a different
   // thread every time they're called.
-  MediaTaskQueue* DecodeTaskQueue() const { return mReader->TaskQueue(); }
+  MediaTaskQueue* DecodeTaskQueue() const { return mReader->OwnerThread(); }
 
   // The time that playback started from the system clock. This is used for
   // timing the presentation of video frames when there's no audio.
   // Accessed only via the state machine thread.  Must be set via SetPlayStartTime.
   TimeStamp mPlayStartTime;
 
   // The amount of time we've spent playing already the media. The current
   // playback position is therefore |Now() - mPlayStartTime +
--- a/dom/media/MediaFormatReader.cpp
+++ b/dom/media/MediaFormatReader.cpp
@@ -281,17 +281,17 @@ MediaFormatReader::AsyncReadMetadata()
     metadata->mInfo = mInfo;
     metadata->mTags = nullptr;
     return MetadataPromise::CreateAndResolve(metadata, __func__);
   }
 
   nsRefPtr<MetadataPromise> p = mMetadataPromise.Ensure(__func__);
 
   mDemuxerInitRequest.Begin(mDemuxer->Init()
-                       ->Then(TaskQueue(), __func__, this,
+                       ->Then(OwnerThread(), __func__, this,
                               &MediaFormatReader::OnDemuxerInitDone,
                               &MediaFormatReader::OnDemuxerInitFailed));
   return p;
 }
 
 void
 MediaFormatReader::OnDemuxerInitDone(nsresult)
 {
@@ -604,17 +604,17 @@ MediaFormatReader::OnDemuxFailed(TrackTy
   }
 }
 
 void
 MediaFormatReader::DoDemuxVideo()
 {
   // TODO Use DecodeAhead value rather than 1.
   mVideo.mDemuxRequest.Begin(mVideo.mTrackDemuxer->GetSamples(1)
-                      ->Then(TaskQueue(), __func__, this,
+                      ->Then(OwnerThread(), __func__, this,
                              &MediaFormatReader::OnVideoDemuxCompleted,
                              &MediaFormatReader::OnVideoDemuxFailed));
 }
 
 void
 MediaFormatReader::OnVideoDemuxCompleted(nsRefPtr<MediaTrackDemuxer::SamplesHolder> aSamples)
 {
   LOGV("%d video samples demuxed (sid:%d)",
@@ -661,17 +661,17 @@ MediaFormatReader::RequestAudioData()
   return p;
 }
 
 void
 MediaFormatReader::DoDemuxAudio()
 {
   // TODO Use DecodeAhead value rather than 1.
   mAudio.mDemuxRequest.Begin(mAudio.mTrackDemuxer->GetSamples(1)
-                      ->Then(TaskQueue(), __func__, this,
+                      ->Then(OwnerThread(), __func__, this,
                              &MediaFormatReader::OnAudioDemuxCompleted,
                              &MediaFormatReader::OnAudioDemuxFailed));
 }
 
 void
 MediaFormatReader::OnAudioDemuxCompleted(nsRefPtr<MediaTrackDemuxer::SamplesHolder> aSamples)
 {
   LOGV("%d audio samples demuxed (sid:%d)",
@@ -783,17 +783,17 @@ MediaFormatReader::ScheduleUpdate(TrackT
   auto& decoder = GetDecoderData(aTrack);
   if (decoder.mUpdateScheduled) {
     return;
   }
   LOGV("SchedulingUpdate(%s)", TrackTypeToStr(aTrack));
   decoder.mUpdateScheduled = true;
   RefPtr<nsIRunnable> task(
     NS_NewRunnableMethodWithArg<TrackType>(this, &MediaFormatReader::Update, aTrack));
-  TaskQueue()->Dispatch(task.forget());
+  OwnerThread()->Dispatch(task.forget());
 }
 
 bool
 MediaFormatReader::UpdateReceivedNewData(TrackType aTrack)
 {
   MOZ_ASSERT(OnTaskQueue());
   auto& decoder = GetDecoderData(aTrack);
 
@@ -916,17 +916,17 @@ MediaFormatReader::DecodeDemuxedSamples(
         decoder.mQueuedSamples.MoveElementsFrom(samples);
       } else {
         MOZ_ASSERT(decoder.mTimeThreshold.isNothing());
         LOG("Stream change occurred on a non-keyframe. Seeking to:%lld",
             sample->mTime);
         decoder.mTimeThreshold = Some(TimeUnit::FromMicroseconds(sample->mTime));
         nsRefPtr<MediaFormatReader> self = this;
         decoder.mSeekRequest.Begin(decoder.mTrackDemuxer->Seek(decoder.mTimeThreshold.ref())
-                   ->Then(TaskQueue(), __func__,
+                   ->Then(OwnerThread(), __func__,
                           [self, aTrack] (media::TimeUnit aTime) {
                             auto& decoder = self->GetDecoderData(aTrack);
                             decoder.mSeekRequest.Complete();
                             self->ScheduleUpdate(aTrack);
                           },
                           [self, aTrack] (DemuxerFailureReason aResult) {
                             auto& decoder = self->GetDecoderData(aTrack);
                             decoder.mSeekRequest.Complete();
@@ -1182,44 +1182,44 @@ MediaFormatReader::Output(TrackType aTra
     NS_WARNING("MediaFormatReader::Output() passed a null sample");
     Error(aTrack);
     return;
   }
 
   RefPtr<nsIRunnable> task =
     NS_NewRunnableMethodWithArgs<TrackType, MediaData*>(
       this, &MediaFormatReader::NotifyNewOutput, aTrack, aSample);
-  TaskQueue()->Dispatch(task.forget());
+  OwnerThread()->Dispatch(task.forget());
 }
 
 void
 MediaFormatReader::DrainComplete(TrackType aTrack)
 {
   RefPtr<nsIRunnable> task =
     NS_NewRunnableMethodWithArg<TrackType>(
       this, &MediaFormatReader::NotifyDrainComplete, aTrack);
-  TaskQueue()->Dispatch(task.forget());
+  OwnerThread()->Dispatch(task.forget());
 }
 
 void
 MediaFormatReader::InputExhausted(TrackType aTrack)
 {
   RefPtr<nsIRunnable> task =
     NS_NewRunnableMethodWithArg<TrackType>(
       this, &MediaFormatReader::NotifyInputExhausted, aTrack);
-  TaskQueue()->Dispatch(task.forget());
+  OwnerThread()->Dispatch(task.forget());
 }
 
 void
 MediaFormatReader::Error(TrackType aTrack)
 {
   RefPtr<nsIRunnable> task =
     NS_NewRunnableMethodWithArg<TrackType>(
       this, &MediaFormatReader::NotifyError, aTrack);
-  TaskQueue()->Dispatch(task.forget());
+  OwnerThread()->Dispatch(task.forget());
 }
 
 void
 MediaFormatReader::Flush(TrackType aTrack)
 {
   MOZ_ASSERT(OnTaskQueue());
   LOG("Flush(%s) BEGIN", TrackTypeToStr(aTrack));
 
@@ -1246,17 +1246,17 @@ MediaFormatReader::SkipVideoDemuxToNextK
   LOG("Skipping up to %lld", aTimeThreshold.ToMicroseconds());
 
   if (mVideo.mError) {
     mVideo.RejectPromise(DECODE_ERROR, __func__);
     return;
   }
 
   mSkipRequest.Begin(mVideo.mTrackDemuxer->SkipToNextRandomAccessPoint(aTimeThreshold)
-                          ->Then(TaskQueue(), __func__, this,
+                          ->Then(OwnerThread(), __func__, this,
                                  &MediaFormatReader::OnVideoSkipCompleted,
                                  &MediaFormatReader::OnVideoSkipFailed));
   return;
 }
 
 void
 MediaFormatReader::OnVideoSkipCompleted(uint32_t aSkipped)
 {
@@ -1363,17 +1363,17 @@ MediaFormatReader::OnSeekFailed(TrackTyp
 
 void
 MediaFormatReader::DoVideoSeek()
 {
   MOZ_ASSERT(mPendingSeekTime.isSome());
   LOGV("Seeking video to %lld", mPendingSeekTime.ref().ToMicroseconds());
   media::TimeUnit seekTime = mPendingSeekTime.ref();
   mVideo.mSeekRequest.Begin(mVideo.mTrackDemuxer->Seek(seekTime)
-                          ->Then(TaskQueue(), __func__, this,
+                          ->Then(OwnerThread(), __func__, this,
                                  &MediaFormatReader::OnVideoSeekCompleted,
                                  &MediaFormatReader::OnVideoSeekFailed));
 }
 
 void
 MediaFormatReader::OnVideoSeekCompleted(media::TimeUnit aTime)
 {
   MOZ_ASSERT(OnTaskQueue());
@@ -1391,17 +1391,17 @@ MediaFormatReader::OnVideoSeekCompleted(
 
 void
 MediaFormatReader::DoAudioSeek()
 {
   MOZ_ASSERT(mPendingSeekTime.isSome());
   LOGV("Seeking audio to %lld", mPendingSeekTime.ref().ToMicroseconds());
   media::TimeUnit seekTime = mPendingSeekTime.ref();
   mAudio.mSeekRequest.Begin(mAudio.mTrackDemuxer->Seek(seekTime)
-                         ->Then(TaskQueue(), __func__, this,
+                         ->Then(OwnerThread(), __func__, this,
                                 &MediaFormatReader::OnAudioSeekCompleted,
                                 &MediaFormatReader::OnAudioSeekFailed));
 }
 
 void
 MediaFormatReader::OnAudioSeekCompleted(media::TimeUnit aTime)
 {
   MOZ_ASSERT(OnTaskQueue());
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -331,17 +331,17 @@ AndroidMediaReader::Seek(int64_t aTarget
     // a sync point, whereas for video there are only keyframes once every few
     // seconds. So if we have both audio and video, we must seek the video
     // stream to the preceeding keyframe first, get the stream time, and then
     // seek the audio stream to match the video stream's time. Otherwise, the
     // audio and video streams won't be in sync after the seek.
     mVideoSeekTimeUs = aTarget;
 
     nsRefPtr<AndroidMediaReader> self = this;
-    mSeekRequest.Begin(DecodeToFirstVideoData()->Then(TaskQueue(), __func__, [self] (VideoData* v) {
+    mSeekRequest.Begin(DecodeToFirstVideoData()->Then(OwnerThread(), __func__, [self] (VideoData* v) {
       self->mSeekRequest.Complete();
       self->mAudioSeekTimeUs = v->mTime;
       self->mSeekPromise.Resolve(self->mAudioSeekTimeUs, __func__);
     }, [self, aTarget] () {
       self->mSeekRequest.Complete();
       self->mAudioSeekTimeUs = aTarget;
       self->mSeekPromise.Resolve(aTarget, __func__);
     }));
--- a/dom/media/fmp4/MP4Reader.cpp
+++ b/dom/media/fmp4/MP4Reader.cpp
@@ -642,17 +642,17 @@ MP4Reader::ScheduleUpdate(TrackType aTra
   decoder.mMonitor.AssertCurrentThreadOwns();
   if (decoder.mUpdateScheduled) {
     return;
   }
   VLOG("SchedulingUpdate(%s)", TrackTypeToStr(aTrack));
   decoder.mUpdateScheduled = true;
   RefPtr<nsIRunnable> task(
     NS_NewRunnableMethodWithArg<TrackType>(this, &MP4Reader::Update, aTrack));
-  TaskQueue()->Dispatch(task.forget());
+  OwnerThread()->Dispatch(task.forget());
 }
 
 bool
 MP4Reader::NeedInput(DecoderData& aDecoder)
 {
   aDecoder.mMonitor.AssertCurrentThreadOwns();
   // We try to keep a few more compressed samples input than decoded samples
   // have been output, provided the state machine has requested we send it a
--- a/dom/media/gtest/TestMP4Reader.cpp
+++ b/dom/media/gtest/TestMP4Reader.cpp
@@ -48,17 +48,17 @@ public:
     EXPECT_EQ(NS_OK, rv);
     thread->Shutdown();
   }
 
 private:
   virtual ~TestBinding()
   {
     {
-      nsRefPtr<MediaTaskQueue> queue = reader->TaskQueue();
+      nsRefPtr<MediaTaskQueue> queue = reader->OwnerThread();
       nsCOMPtr<nsIRunnable> task = NS_NewRunnableMethod(reader, &MP4Reader::Shutdown);
       // Hackily bypass the tail dispatcher so that we can AwaitShutdownAndIdle.
       // In production code we'd use BeginShutdown + promises.
       queue->Dispatch(task.forget(), AbstractThread::AssertDispatchSuccess,
                       AbstractThread::TailDispatch);
       queue->AwaitShutdownAndIdle();
     }
     decoder = nullptr;
--- a/dom/media/gtest/TestMozPromise.cpp
+++ b/dom/media/gtest/TestMozPromise.cpp
@@ -22,17 +22,17 @@ public:
     : mTaskQueue(new MediaTaskQueue(GetMediaThreadPool(MediaThreadType::PLAYBACK)))
   {}
 
   ~AutoTaskQueue()
   {
     mTaskQueue->AwaitShutdownAndIdle();
   }
 
-  MediaTaskQueue* TaskQueue() { return mTaskQueue; }
+  MediaTaskQueue* Queue() { return mTaskQueue; }
 private:
   nsRefPtr<MediaTaskQueue> mTaskQueue;
 };
 
 class DelayedResolveOrReject : public nsRunnable
 {
 public:
   DelayedResolveOrReject(MediaTaskQueue* aTaskQueue,
@@ -86,39 +86,39 @@ RunOnTaskQueue(MediaTaskQueue* aQueue, F
 }
 
 // std::function can't come soon enough. :-(
 #define DO_FAIL []()->void { EXPECT_TRUE(false); }
 
 TEST(MozPromise, BasicResolve)
 {
   AutoTaskQueue atq;
-  nsRefPtr<MediaTaskQueue> queue = atq.TaskQueue();
+  nsRefPtr<MediaTaskQueue> queue = atq.Queue();
   RunOnTaskQueue(queue, [queue] () -> void {
     TestPromise::CreateAndResolve(42, __func__)->Then(queue, __func__,
       [queue] (int aResolveValue) -> void { EXPECT_EQ(aResolveValue, 42); queue->BeginShutdown(); },
       DO_FAIL);
   });
 }
 
 TEST(MozPromise, BasicReject)
 {
   AutoTaskQueue atq;
-  nsRefPtr<MediaTaskQueue> queue = atq.TaskQueue();
+  nsRefPtr<MediaTaskQueue> queue = atq.Queue();
   RunOnTaskQueue(queue, [queue] () -> void {
     TestPromise::CreateAndReject(42.0, __func__)->Then(queue, __func__,
       DO_FAIL,
       [queue] (int aRejectValue) -> void { EXPECT_EQ(aRejectValue, 42.0); queue->BeginShutdown(); });
   });
 }
 
 TEST(MozPromise, AsyncResolve)
 {
   AutoTaskQueue atq;
-  nsRefPtr<MediaTaskQueue> queue = atq.TaskQueue();
+  nsRefPtr<MediaTaskQueue> queue = atq.Queue();
   RunOnTaskQueue(queue, [queue] () -> void {
     nsRefPtr<TestPromise::Private> p = new TestPromise::Private(__func__);
 
     // Kick off three racing tasks, and make sure we get the one that finishes earliest.
     nsRefPtr<DelayedResolveOrReject> a = new DelayedResolveOrReject(queue, p, RRValue::MakeResolve(32), 10);
     nsRefPtr<DelayedResolveOrReject> b = new DelayedResolveOrReject(queue, p, RRValue::MakeResolve(42), 5);
     nsRefPtr<DelayedResolveOrReject> c = new DelayedResolveOrReject(queue, p, RRValue::MakeReject(32.0), 7);
 
@@ -138,17 +138,17 @@ TEST(MozPromise, AsyncResolve)
     }, DO_FAIL);
   });
 }
 
 TEST(MozPromise, CompletionPromises)
 {
   bool invokedPass = false;
   AutoTaskQueue atq;
-  nsRefPtr<MediaTaskQueue> queue = atq.TaskQueue();
+  nsRefPtr<MediaTaskQueue> queue = atq.Queue();
   RunOnTaskQueue(queue, [queue, &invokedPass] () -> void {
     TestPromise::CreateAndResolve(40, __func__)
     ->Then(queue, __func__,
       [] (int aVal) -> nsRefPtr<TestPromise> { return TestPromise::CreateAndResolve(aVal + 10, __func__); },
       DO_FAIL)
     ->CompletionPromise()
     ->Then(queue, __func__, [&invokedPass] () -> void { invokedPass = true; }, DO_FAIL)
     ->CompletionPromise()
@@ -169,17 +169,17 @@ TEST(MozPromise, CompletionPromises)
       DO_FAIL,
       [queue, &invokedPass] (double aVal) -> void { EXPECT_EQ(aVal, 42.0); EXPECT_TRUE(invokedPass); queue->BeginShutdown(); });
   });
 }
 
 TEST(MozPromise, PromiseAllResolve)
 {
   AutoTaskQueue atq;
-  nsRefPtr<MediaTaskQueue> queue = atq.TaskQueue();
+  nsRefPtr<MediaTaskQueue> queue = atq.Queue();
   RunOnTaskQueue(queue, [queue] () -> void {
 
     nsTArray<nsRefPtr<TestPromise>> promises;
     promises.AppendElement(TestPromise::CreateAndResolve(22, __func__));
     promises.AppendElement(TestPromise::CreateAndResolve(32, __func__));
     promises.AppendElement(TestPromise::CreateAndResolve(42, __func__));
 
     TestPromise::All(queue, promises)->Then(queue, __func__,
@@ -193,17 +193,17 @@ TEST(MozPromise, PromiseAllResolve)
       DO_FAIL
     );
   });
 }
 
 TEST(MozPromise, PromiseAllReject)
 {
   AutoTaskQueue atq;
-  nsRefPtr<MediaTaskQueue> queue = atq.TaskQueue();
+  nsRefPtr<MediaTaskQueue> queue = atq.Queue();
   RunOnTaskQueue(queue, [queue] () -> void {
 
     nsTArray<nsRefPtr<TestPromise>> promises;
     promises.AppendElement(TestPromise::CreateAndResolve(22, __func__));
     promises.AppendElement(TestPromise::CreateAndReject(32.0, __func__));
     promises.AppendElement(TestPromise::CreateAndResolve(42, __func__));
 
     TestPromise::All(queue, promises)->Then(queue, __func__,
--- a/dom/media/mediasource/MediaSourceReader.cpp
+++ b/dom/media/mediasource/MediaSourceReader.cpp
@@ -158,17 +158,17 @@ MediaSourceReader::RequestAudioData()
   }
   MOZ_DIAGNOSTIC_ASSERT(!mAudioSeekRequest.Exists());
 
   SwitchSourceResult ret = SwitchAudioSource(&mLastAudioTime);
   switch (ret) {
     case SOURCE_NEW:
       GetAudioReader()->ResetDecode();
       mAudioSeekRequest.Begin(GetAudioReader()->Seek(GetReaderAudioTime(mLastAudioTime), 0)
-                              ->Then(TaskQueue(), __func__, this,
+                              ->Then(OwnerThread(), __func__, this,
                                      &MediaSourceReader::CompleteAudioSeekAndDoRequest,
                                      &MediaSourceReader::CompleteAudioSeekAndRejectPromise));
       break;
     case SOURCE_NONE:
       if (!mLastAudioTime) {
         // This is the first call to RequestAudioData.
         // Fallback to using decoder with earliest data.
         mAudioSourceDecoder = FirstDecoder(MediaData::AUDIO_DATA);
@@ -183,17 +183,17 @@ MediaSourceReader::RequestAudioData()
       break;
   }
   return p;
 }
 
 void MediaSourceReader::DoAudioRequest()
 {
   mAudioRequest.Begin(GetAudioReader()->RequestAudioData()
-                      ->Then(TaskQueue(), __func__, this,
+                      ->Then(OwnerThread(), __func__, this,
                              &MediaSourceReader::OnAudioDecoded,
                              &MediaSourceReader::OnAudioNotDecoded));
 }
 
 void
 MediaSourceReader::OnAudioDecoded(AudioData* aSample)
 {
   MOZ_DIAGNOSTIC_ASSERT(!IsSeeking());
@@ -206,17 +206,17 @@ MediaSourceReader::OnAudioDecoded(AudioD
 
   MSE_DEBUGV("[mTime=%lld mDuration=%lld mDiscontinuity=%d]",
              ourTime, aSample->mDuration, aSample->mDiscontinuity);
   if (mDropAudioBeforeThreshold) {
     if (ourTime < mTimeThreshold) {
       MSE_DEBUG("mTime=%lld < mTimeThreshold=%lld",
                 ourTime, mTimeThreshold);
       mAudioRequest.Begin(GetAudioReader()->RequestAudioData()
-                          ->Then(TaskQueue(), __func__, this,
+                          ->Then(OwnerThread(), __func__, this,
                                  &MediaSourceReader::OnAudioDecoded,
                                  &MediaSourceReader::OnAudioNotDecoded));
       return;
     }
     mDropAudioBeforeThreshold = false;
   }
 
   // Adjust the sample time into our reference.
@@ -276,17 +276,17 @@ MediaSourceReader::OnAudioNotDecoded(Not
     AdjustEndTime(&mLastAudioTime, mAudioSourceDecoder);
   }
 
   SwitchSourceResult result = SwitchAudioSource(&mLastAudioTime);
   // See if we can find a different source that can pick up where we left off.
   if (result == SOURCE_NEW) {
     GetAudioReader()->ResetDecode();
     mAudioSeekRequest.Begin(GetAudioReader()->Seek(GetReaderAudioTime(mLastAudioTime), 0)
-                            ->Then(TaskQueue(), __func__, this,
+                            ->Then(OwnerThread(), __func__, this,
                                    &MediaSourceReader::CompleteAudioSeekAndDoRequest,
                                    &MediaSourceReader::CompleteAudioSeekAndRejectPromise));
     return;
   }
 
   // If we got a DECODE_ERROR and we have buffered data in the requested range
   // then it must be a genuine decoding error.
   // Otherwise we can assume that the data was either evicted or explicitely
@@ -334,17 +334,17 @@ MediaSourceReader::RequestVideoData(bool
   MOZ_DIAGNOSTIC_ASSERT(!mVideoSeekRequest.Exists());
   mForceVideoDecodeAhead = aForceDecodeAhead;
 
   SwitchSourceResult ret = SwitchVideoSource(&mLastVideoTime);
   switch (ret) {
     case SOURCE_NEW:
       GetVideoReader()->ResetDecode();
       mVideoSeekRequest.Begin(GetVideoReader()->Seek(GetReaderVideoTime(mLastVideoTime), 0)
-                             ->Then(TaskQueue(), __func__, this,
+                             ->Then(OwnerThread(), __func__, this,
                                     &MediaSourceReader::CompleteVideoSeekAndDoRequest,
                                     &MediaSourceReader::CompleteVideoSeekAndRejectPromise));
       break;
     case SOURCE_NONE:
       if (!mLastVideoTime) {
         // This is the first call to RequestVideoData.
         // Fallback to using decoder with earliest data.
         mVideoSourceDecoder = FirstDecoder(MediaData::VIDEO_DATA);
@@ -363,17 +363,17 @@ MediaSourceReader::RequestVideoData(bool
 }
 
 void
 MediaSourceReader::DoVideoRequest()
 {
   mVideoRequest.Begin(GetVideoReader()->RequestVideoData(mDropVideoBeforeThreshold,
                                                          GetReaderVideoTime(mTimeThreshold),
                                                          mForceVideoDecodeAhead)
-                      ->Then(TaskQueue(), __func__, this,
+                      ->Then(OwnerThread(), __func__, this,
                              &MediaSourceReader::OnVideoDecoded,
                              &MediaSourceReader::OnVideoNotDecoded));
 }
 
 void
 MediaSourceReader::OnVideoDecoded(VideoData* aSample)
 {
   MOZ_DIAGNOSTIC_ASSERT(!IsSeeking());
@@ -433,17 +433,17 @@ MediaSourceReader::OnVideoNotDecoded(Not
     AdjustEndTime(&mLastVideoTime, mVideoSourceDecoder);
   }
 
   // See if we can find a different reader that can pick up where we left off.
   SwitchSourceResult result = SwitchVideoSource(&mLastVideoTime);
   if (result == SOURCE_NEW) {
     GetVideoReader()->ResetDecode();
     mVideoSeekRequest.Begin(GetVideoReader()->Seek(GetReaderVideoTime(mLastVideoTime), 0)
-                           ->Then(TaskQueue(), __func__, this,
+                           ->Then(OwnerThread(), __func__, this,
                                   &MediaSourceReader::CompleteVideoSeekAndDoRequest,
                                   &MediaSourceReader::CompleteVideoSeekAndRejectPromise));
     return;
   }
 
   // If we got a DECODE_ERROR and we have buffered data in the requested range
   // then it must be a genuine decoding error.
   // Otherwise we can assume that the data was either evicted or explicitely
@@ -504,17 +504,17 @@ MediaSourceReader::Shutdown()
   return p;
 }
 
 void
 MediaSourceReader::ContinueShutdown()
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   if (mTrackBuffers.Length()) {
-    mTrackBuffers[0]->Shutdown()->Then(TaskQueue(), __func__, this,
+    mTrackBuffers[0]->Shutdown()->Then(OwnerThread(), __func__, this,
                                        &MediaSourceReader::ContinueShutdown,
                                        &MediaSourceReader::ContinueShutdown);
     mShutdownTrackBuffers.AppendElement(mTrackBuffers[0]);
     mTrackBuffers.RemoveElementAt(0);
     return;
   }
 
   mAudioTrack = nullptr;
@@ -737,17 +737,17 @@ MediaSourceReader::CreateSubDecoder(cons
   // its own task queue. Unfortunately though, Request{Audio,Video}Data implementations
   // currently assert that they're on "the decode thread", and so having
   // separate task queues makes MediaSource stuff unnecessarily cumbersome. We
   // should remove the need for these assertions (which probably involves making
   // all Request*Data implementations fully async), and then get rid of the
   // borrowing.
   nsRefPtr<SourceBufferDecoder> decoder =
     new SourceBufferDecoder(new SourceBufferResource(aType), mDecoder, aTimestampOffset);
-  nsRefPtr<MediaDecoderReader> reader(CreateReaderForType(aType, decoder, TaskQueue()));
+  nsRefPtr<MediaDecoderReader> reader(CreateReaderForType(aType, decoder, OwnerThread()));
   if (!reader) {
     return nullptr;
   }
 
   // MSE uses a start time of 0 everywhere. Set that immediately on the
   // subreader to make sure that it's always in a state where we can invoke
   // GetBuffered on it.
   reader->DispatchSetStartTime(0);
@@ -824,17 +824,17 @@ MediaSourceReader::TrackBuffersContainTi
 void
 MediaSourceReader::NotifyTimeRangesChanged()
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   if (mWaitingForSeekData) {
     //post a task to the decode queue to try to complete the pending seek.
     RefPtr<nsIRunnable> task(NS_NewRunnableMethod(
         this, &MediaSourceReader::AttemptSeek));
-    TaskQueue()->Dispatch(task.forget());
+    OwnerThread()->Dispatch(task.forget());
   } else {
     MaybeNotifyHaveData();
   }
 }
 
 nsRefPtr<MediaDecoderReader::SeekPromise>
 MediaSourceReader::Seek(int64_t aTime, int64_t aIgnored /* Used only for ogg which is non-MSE */)
 {
@@ -941,17 +941,17 @@ MediaSourceReader::DoAudioSeek()
       SwitchAudioSource(&mOriginalSeekTime) == SOURCE_NONE) {
     // Data we need got evicted since the last time we checked for data
     // availability. Abort current seek attempt.
     mWaitingForSeekData = true;
     return;
   }
   GetAudioReader()->ResetDecode();
   mAudioSeekRequest.Begin(GetAudioReader()->Seek(GetReaderAudioTime(seekTime), 0)
-                         ->Then(TaskQueue(), __func__, this,
+                         ->Then(OwnerThread(), __func__, this,
                                 &MediaSourceReader::OnAudioSeekCompleted,
                                 &MediaSourceReader::OnAudioSeekFailed));
   MSE_DEBUG("reader=%p", GetAudioReader());
 }
 
 void
 MediaSourceReader::OnAudioSeekCompleted(int64_t aTime)
 {
@@ -1013,17 +1013,17 @@ MediaSourceReader::DoVideoSeek()
   if (SwitchVideoSource(&seekTime) == SOURCE_NONE) {
     // Data we need got evicted since the last time we checked for data
     // availability. Abort current seek attempt.
     mWaitingForSeekData = true;
     return;
   }
   GetVideoReader()->ResetDecode();
   mVideoSeekRequest.Begin(GetVideoReader()->Seek(GetReaderVideoTime(seekTime), 0)
-                          ->Then(TaskQueue(), __func__, this,
+                          ->Then(OwnerThread(), __func__, this,
                                  &MediaSourceReader::OnVideoSeekCompleted,
                                  &MediaSourceReader::OnVideoSeekFailed));
   MSE_DEBUG("reader=%p", GetVideoReader());
 }
 
 media::TimeIntervals
 MediaSourceReader::GetBuffered()
 {
@@ -1215,17 +1215,17 @@ MediaSourceReader::Ended(bool aEnded)
 {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   mEnded = aEnded;
   if (aEnded) {
     // post a task to the decode queue to try to complete any pending
     // seek or wait
     RefPtr<nsIRunnable> task(NS_NewRunnableMethod(
         this, &MediaSourceReader::NotifyTimeRangesChanged));
-    TaskQueue()->Dispatch(task.forget());
+    OwnerThread()->Dispatch(task.forget());
   }
 }
 
 bool
 MediaSourceReader::IsEnded()
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   return mEnded;
--- a/dom/media/mediasource/TrackBuffer.cpp
+++ b/dom/media/mediasource/TrackBuffer.cpp
@@ -110,29 +110,29 @@ TrackBuffer::Shutdown()
   mMetadataRequest.DisconnectIfExists();
 
   MOZ_ASSERT(mShutdownPromise.IsEmpty());
   nsRefPtr<ShutdownPromise> p = mShutdownPromise.Ensure(__func__);
 
   RefPtr<MediaTaskQueue> queue = mTaskQueue;
   mTaskQueue = nullptr;
   queue->BeginShutdown()
-       ->Then(mParentDecoder->GetReader()->TaskQueue(), __func__, this,
+       ->Then(mParentDecoder->GetReader()->OwnerThread(), __func__, this,
               &TrackBuffer::ContinueShutdown, &TrackBuffer::ContinueShutdown);
 
   return p;
 }
 
 void
 TrackBuffer::ContinueShutdown()
 {
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
   if (mDecoders.Length()) {
     mDecoders[0]->GetReader()->Shutdown()
-                ->Then(mParentDecoder->GetReader()->TaskQueue(), __func__, this,
+                ->Then(mParentDecoder->GetReader()->OwnerThread(), __func__, this,
                        &TrackBuffer::ContinueShutdown, &TrackBuffer::ContinueShutdown);
     mShutdownDecoders.AppendElement(mDecoders[0]);
     mDecoders.RemoveElementAt(0);
     return;
   }
 
   MOZ_ASSERT(!mCurrentDecoder, "Detach() should have been called");
   mInitializedDecoders.Clear();
@@ -262,20 +262,20 @@ TrackBuffer::BufferAppend()
   if (decoders.Length()) {
     // We're going to have to wait for the decoder to initialize, the promise
     // will be resolved once initialization completes.
     return p;
   }
 
   nsRefPtr<TrackBuffer> self = this;
 
-  ProxyMediaCall(mParentDecoder->GetReader()->TaskQueue(), this, __func__,
+  ProxyMediaCall(mParentDecoder->GetReader()->OwnerThread(), this, __func__,
                  &TrackBuffer::UpdateBufferedRanges,
                  mLastAppendRange, /* aNotifyParent */ true)
-      ->Then(mParentDecoder->GetReader()->TaskQueue(), __func__,
+      ->Then(mParentDecoder->GetReader()->OwnerThread(), __func__,
              [self] {
                self->mInitializationPromise.ResolveIfExists(self->HasInitSegment(), __func__);
              },
              [self] (nsresult) { MOZ_CRASH("Never called."); });
 
   return p;
 }
 
@@ -343,32 +343,32 @@ TrackBuffer::UpdateBufferedRanges(Interv
 }
 
 void
 TrackBuffer::NotifyTimeRangesChanged()
 {
   RefPtr<nsIRunnable> task =
     NS_NewRunnableMethod(mParentDecoder->GetReader(),
                          &MediaSourceReader::NotifyTimeRangesChanged);
-  mParentDecoder->GetReader()->TaskQueue()->Dispatch(task.forget());
+  mParentDecoder->GetReader()->OwnerThread()->Dispatch(task.forget());
 }
 
 void
 TrackBuffer::NotifyReaderDataRemoved(MediaDecoderReader* aReader)
 {
   MOZ_ASSERT(NS_IsMainThread());
 
   nsRefPtr<TrackBuffer> self = this;
   nsRefPtr<MediaDecoderReader> reader = aReader;
   RefPtr<nsIRunnable> task =
     NS_NewRunnableFunction([self, reader] () {
       reader->NotifyDataRemoved();
       self->UpdateBufferedRanges(Interval<int64_t>(), /* aNotifyParent */ false);
     });
-  aReader->TaskQueue()->Dispatch(task.forget());
+  aReader->OwnerThread()->Dispatch(task.forget());
 }
 
 class DecoderSorter
 {
 public:
   explicit DecoderSorter(const TrackBuffer::DecoderBufferedMap& aMap)
     : mMap(aMap)
   {}
@@ -672,33 +672,33 @@ TrackBuffer::NewDecoder(TimeUnit aTimest
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
   mCurrentDecoder = decoder;
   mDecoders.AppendElement(decoder);
 
   mLastStartTimestamp = 0;
   mLastEndTimestamp.reset();
   mLastTimestampOffset = aTimestampOffset;
 
-  decoder->SetTaskQueue(decoder->GetReader()->TaskQueue());
+  decoder->SetTaskQueue(decoder->GetReader()->OwnerThread());
   return decoder.forget();
 }
 
 bool
 TrackBuffer::QueueInitializeDecoder(SourceBufferDecoder* aDecoder)
 {
  // Bug 1153295: We must ensure that the nsIRunnable holds a strong reference
   // to aDecoder.
   static_assert(mozilla::IsBaseOf<nsISupports, SourceBufferDecoder>::value,
                 "SourceBufferDecoder must be inheriting from nsISupports");
   RefPtr<nsIRunnable> task =
     NS_NewRunnableMethodWithArg<SourceBufferDecoder*>(this,
                                                       &TrackBuffer::InitializeDecoder,
                                                       aDecoder);
   // We need to initialize the reader on its own task queue
-  aDecoder->GetReader()->TaskQueue()->Dispatch(task.forget());
+  aDecoder->GetReader()->OwnerThread()->Dispatch(task.forget());
   return true;
 }
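The static_assert above is a compile-time guard: the runnable can only be trusted to keep aDecoder alive if the type is actually ref-counted. A standalone sketch of the same guard using std::is_base_of, the standard counterpart of mozilla::IsBaseOf (RefCountedBase, GoodPayload and QueueWork are invented names):

    #include <type_traits>

    struct RefCountedBase { /* AddRef()/Release() would live here */ };
    struct GoodPayload : RefCountedBase {};
    struct BadPayload {};

    // Rejects, at compile time, any payload type that is not ref-counted, so
    // the queued work can legitimately hold a strong reference to it.
    template <typename T>
    void QueueWork(T* /* aPayload */) {
      static_assert(std::is_base_of<RefCountedBase, T>::value,
                    "payload must derive from RefCountedBase");
    }

    int main() {
      GoodPayload good;
      QueueWork(&good);                // compiles
      // QueueWork(new BadPayload());  // would trip the static_assert
      return 0;
    }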
 
 // MetadataRecipient is a class used to pass extra values required by the
 // MetadataPromise's target methods.
 class MetadataRecipient {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MetadataRecipient);
@@ -789,17 +789,17 @@ TrackBuffer::InitializeDecoder(SourceBuf
     MSE_DEBUG("was shut down while reading metadata. Aborting initialization.");
     return;
   }
   if (mCurrentDecoder != aDecoder) {
     MSE_DEBUG("append was cancelled. Aborting initialization.");
     return;
   }
 
-  mMetadataRequest.Begin(promise->Then(reader->TaskQueue(), __func__,
+  mMetadataRequest.Begin(promise->Then(reader->OwnerThread(), __func__,
                                        recipient.get(),
                                        &MetadataRecipient::OnMetadataRead,
                                        &MetadataRecipient::OnMetadataNotRead));
 }
 
 void
 TrackBuffer::OnMetadataRead(MetadataHolder* aMetadata,
                             SourceBufferDecoder* aDecoder,
@@ -860,17 +860,17 @@ TrackBuffer::OnMetadataRead(MetadataHold
     return;
   }
 }
 
 void
 TrackBuffer::OnMetadataNotRead(ReadMetadataFailureReason aReason,
                                SourceBufferDecoder* aDecoder)
 {
-  MOZ_ASSERT(aDecoder->GetReader()->TaskQueue()->IsCurrentThreadIn());
+  MOZ_ASSERT(aDecoder->GetReader()->OwnerThread()->IsCurrentThreadIn());
 
   mParentDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
 
   mMetadataRequest.Complete();
 
   if (mShutdown) {
     MSE_DEBUG("was shut down while reading metadata. Aborting initialization.");
@@ -932,20 +932,20 @@ TrackBuffer::CompleteInitializeDecoder(S
 
   // Tell our reader that we have more data to ensure that playback starts if
   // required when data is appended.
   NotifyTimeRangesChanged();
 
   MSE_DEBUG("Reader %p activated",
             aDecoder->GetReader());
   nsRefPtr<TrackBuffer> self = this;
-  ProxyMediaCall(mParentDecoder->GetReader()->TaskQueue(), this, __func__,
+  ProxyMediaCall(mParentDecoder->GetReader()->OwnerThread(), this, __func__,
                  &TrackBuffer::UpdateBufferedRanges,
                  Interval<int64_t>(), /* aNotifyParent */ true)
-      ->Then(mParentDecoder->GetReader()->TaskQueue(), __func__,
+      ->Then(mParentDecoder->GetReader()->OwnerThread(), __func__,
              [self] {
                self->mInitializationPromise.ResolveIfExists(self->HasInitSegment(), __func__);
              },
              [self] (nsresult) { MOZ_CRASH("Never called."); });
 }
 
 bool
 TrackBuffer::ValidateTrackFormats(const MediaInfo& aInfo)
@@ -1191,17 +1191,17 @@ TrackBuffer::RemoveDecoder(SourceBufferD
     // There should be no other references to the decoder. Assert that
     // we aren't using it in the MediaSourceReader.
     MOZ_ASSERT(!mParentDecoder->IsActiveReader(aDecoder->GetReader()));
     mInitializedDecoders.RemoveElement(aDecoder);
     mDecoders.RemoveElement(aDecoder);
     // Remove associated buffered range from our cache.
     mReadersBuffered.erase(aDecoder);
   }
-  aDecoder->GetReader()->TaskQueue()->Dispatch(task.forget());
+  aDecoder->GetReader()->OwnerThread()->Dispatch(task.forget());
 }
 
 nsRefPtr<TrackBuffer::RangeRemovalPromise>
 TrackBuffer::RangeRemoval(TimeUnit aStart, TimeUnit aEnd)
 {
   MOZ_ASSERT(NS_IsMainThread());
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
 
@@ -1264,20 +1264,20 @@ TrackBuffer::RangeRemoval(TimeUnit aStar
   }
 
   RemoveEmptyDecoders(decoders);
 
   nsRefPtr<RangeRemovalPromise> p = mRangeRemovalPromise.Ensure(__func__);
 
   // Make sure our buffered ranges got updated before resolving promise.
   nsRefPtr<TrackBuffer> self = this;
-  ProxyMediaCall(mParentDecoder->GetReader()->TaskQueue(), this, __func__,
+  ProxyMediaCall(mParentDecoder->GetReader()->OwnerThread(), this, __func__,
                  &TrackBuffer::UpdateBufferedRanges,
                  Interval<int64_t>(), /* aNotifyParent */ false)
-    ->Then(mParentDecoder->GetReader()->TaskQueue(), __func__,
+    ->Then(mParentDecoder->GetReader()->OwnerThread(), __func__,
            [self] {
              self->mRangeRemovalPromise.ResolveIfExists(true, __func__);
            },
            [self] (nsresult) { MOZ_CRASH("Never called."); });
 
   return p;
 }
 
--- a/dom/media/omx/MediaCodecReader.cpp
+++ b/dom/media/omx/MediaCodecReader.cpp
@@ -677,17 +677,17 @@ MediaCodecReader::AsyncReadMetadata()
     return MediaDecoderReader::MetadataPromise::CreateAndReject(
              ReadMetadataFailureReason::METADATA_ERROR, __func__);
   }
 
   nsRefPtr<MediaDecoderReader::MetadataPromise> p = mMetadataPromise.Ensure(__func__);
 
   nsRefPtr<MediaCodecReader> self = this;
   mMediaResourceRequest.Begin(CreateMediaCodecs()
-    ->Then(TaskQueue(), __func__,
+    ->Then(OwnerThread(), __func__,
       [self] (bool) -> void {
         self->mMediaResourceRequest.Complete();
         self->HandleResourceAllocated();
       }, [self] (bool) -> void {
         self->mMediaResourceRequest.Complete();
         self->mMetadataPromise.Reject(ReadMetadataFailureReason::METADATA_ERROR, __func__);
       }));
 
--- a/dom/media/omx/MediaOmxCommonDecoder.cpp
+++ b/dom/media/omx/MediaOmxCommonDecoder.cpp
@@ -116,17 +116,17 @@ MediaOmxCommonDecoder::PauseStateMachine
     return;
   }
   // enter dormant state
   RefPtr<nsRunnable> event =
     NS_NewRunnableMethodWithArg<bool>(
       GetStateMachine(),
       &MediaDecoderStateMachine::SetDormant,
       true);
-  GetStateMachine()->TaskQueue()->Dispatch(event.forget());
+  GetStateMachine()->OwnerThread()->Dispatch(event.forget());
 }
 
 void
 MediaOmxCommonDecoder::ResumeStateMachine()
 {
   MOZ_ASSERT(NS_IsMainThread());
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
   DECODER_LOG(LogLevel::Debug, ("%s current time %f", __PRETTY_FUNCTION__, mLogicalPosition));
@@ -145,27 +145,27 @@ MediaOmxCommonDecoder::ResumeStateMachin
                                  SeekTarget::Accurate,
                                  MediaDecoderEventVisibility::Suppressed);
   // Call Seek of MediaDecoderStateMachine to suppress seek events.
   RefPtr<nsRunnable> event =
     NS_NewRunnableMethodWithArg<SeekTarget>(
       GetStateMachine(),
       &MediaDecoderStateMachine::Seek,
       target);
-  GetStateMachine()->TaskQueue()->Dispatch(event.forget());
+  GetStateMachine()->OwnerThread()->Dispatch(event.forget());
 
   mNextState = mPlayState;
   ChangeState(PLAY_STATE_LOADING);
   // exit dormant state
   event =
     NS_NewRunnableMethodWithArg<bool>(
       GetStateMachine(),
       &MediaDecoderStateMachine::SetDormant,
       false);
-  GetStateMachine()->TaskQueue()->Dispatch(event.forget());
+  GetStateMachine()->OwnerThread()->Dispatch(event.forget());
   UpdateLogicalPosition();
 }
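ResumeStateMachine() leans on a property the hunk leaves implicit: runnables dispatched to a single owner thread run in dispatch order, so the suppressed Seek always executes before SetDormant(false). A tiny standalone illustration of that FIFO guarantee (single-threaded toy queue, invented for this note):

    #include <cstdio>
    #include <functional>
    #include <queue>

    int main() {
      std::queue<std::function<void()>> ownerQueue;  // toy state-machine thread
      ownerQueue.push([] { std::printf("Seek (events suppressed)\n"); });
      ownerQueue.push([] { std::printf("SetDormant(false)\n"); });
      // Tasks run strictly in the order they were dispatched.
      while (!ownerQueue.empty()) {
        ownerQueue.front()();
        ownerQueue.pop();
      }
      return 0;
    }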
 
 void
 MediaOmxCommonDecoder::AudioOffloadTearDown()
 {
   MOZ_ASSERT(NS_IsMainThread());
   DECODER_LOG(LogLevel::Debug, ("%s", __PRETTY_FUNCTION__));
--- a/dom/media/omx/MediaOmxReader.cpp
+++ b/dom/media/omx/MediaOmxReader.cpp
@@ -239,17 +239,17 @@ MediaOmxReader::AsyncReadMetadata()
     mMP3FrameParser.SetLength(mDecoder->GetResource()->GetLength());
     ProcessCachedData(0);
   }
 
   nsRefPtr<MediaDecoderReader::MetadataPromise> p = mMetadataPromise.Ensure(__func__);
 
   nsRefPtr<MediaOmxReader> self = this;
   mMediaResourceRequest.Begin(mOmxDecoder->AllocateMediaResources()
-    ->Then(TaskQueue(), __func__,
+    ->Then(OwnerThread(), __func__,
       [self] (bool) -> void {
         self->mMediaResourceRequest.Complete();
         self->HandleResourceAllocated();
       }, [self] (bool) -> void {
         self->mMediaResourceRequest.Complete();
         self->mMetadataPromise.Reject(ReadMetadataFailureReason::METADATA_ERROR, __func__);
       }));
 
@@ -534,17 +534,17 @@ MediaOmxReader::Seek(int64_t aTarget, in
     // a sync point, whereas for video there are only keyframes once every few
     // seconds. So if we have both audio and video, we must seek the video
    // stream to the preceding keyframe first, get the stream time, and then
     // seek the audio stream to match the video stream's time. Otherwise, the
     // audio and video streams won't be in sync after the seek.
     mVideoSeekTimeUs = aTarget;
 
     nsRefPtr<MediaOmxReader> self = this;
-    mSeekRequest.Begin(DecodeToFirstVideoData()->Then(TaskQueue(), __func__, [self] (VideoData* v) {
+    mSeekRequest.Begin(DecodeToFirstVideoData()->Then(OwnerThread(), __func__, [self] (VideoData* v) {
       self->mSeekRequest.Complete();
       self->mAudioSeekTimeUs = v->mTime;
       self->mSeekPromise.Resolve(self->mAudioSeekTimeUs, __func__);
     }, [self, aTarget] () {
       self->mSeekRequest.Complete();
       self->mAudioSeekTimeUs = aTarget;
       self->mSeekPromise.Resolve(aTarget, __func__);
     }));
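The comment in this hunk spells out the seek order: video can only land on a keyframe, so the reader seeks video first, reads back the time actually reached, and then points the audio seek at that time. A standalone sketch of that ordering; SeekVideoToKeyframe and SeekAudioTo are hypothetical helpers with a made-up two-second keyframe interval:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical: snap a target to the preceding keyframe (keyframes every 2s).
    int64_t SeekVideoToKeyframe(int64_t aTargetUs) {
      const int64_t kKeyframeIntervalUs = 2000000;
      return (aTargetUs / kKeyframeIntervalUs) * kKeyframeIntervalUs;
    }

    void SeekAudioTo(int64_t aTimeUs) {
      std::printf("audio seeked to %lld us\n", static_cast<long long>(aTimeUs));
    }

    int main() {
      const int64_t target = 5300000;                   // requested target: 5.3s
      int64_t videoTime = SeekVideoToKeyframe(target);  // video lands on 4.0s
      SeekAudioTo(videoTime);                           // audio follows the video
      return 0;
    }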
@@ -596,17 +596,17 @@ int64_t MediaOmxReader::ProcessCachedDat
 
   int64_t bufferLength = std::min<int64_t>(resourceLength-aOffset, sReadSize);
   nsRefPtr<NotifyDataArrivedRunnable> runnable(
     new NotifyDataArrivedRunnable(this, bufferLength, aOffset, resourceLength));
 
   if (OnTaskQueue()) {
     runnable->Run();
   } else {
-    TaskQueue()->Dispatch(runnable.forget());
+    OwnerThread()->Dispatch(runnable.forget());
   }
 
   return resourceLength - aOffset - bufferLength;
 }
 
 android::sp<android::MediaSource> MediaOmxReader::GetAudioOffloadTrack()
 {
   if (!mOmxDecoder.get()) {
--- a/dom/media/raw/RawReader.cpp
+++ b/dom/media/raw/RawReader.cpp
@@ -255,17 +255,17 @@ RawReader::Seek(int64_t aTime, int64_t a
     MOZ_ASSERT(self->OnTaskQueue());
     NS_ENSURE_TRUE(!self->mShutdown, false);
     bool skip = false;
     return self->DecodeVideoFrame(skip, 0);
   }, [self, aTime] () {
     MOZ_ASSERT(self->OnTaskQueue());
     return self->mVideoQueue.Peek() &&
            self->mVideoQueue.Peek()->GetEndTime() >= aTime;
-  })->Then(TaskQueue(), __func__, [self, p, aTime] () {
+  })->Then(OwnerThread(), __func__, [self, p, aTime] () {
     while (self->mVideoQueue.GetSize() >= 2) {
       nsRefPtr<VideoData> releaseMe = self->mVideoQueue.PopFront();
     }
     p->Resolve(aTime, __func__);
   }, [self, p, frame] {
     self->mCurrentFrame = frame;
     self->mVideoQueue.Reset();
     p->Reject(NS_ERROR_FAILURE, __func__);
--- a/dom/media/webaudio/MediaBufferDecoder.cpp
+++ b/dom/media/webaudio/MediaBufferDecoder.cpp
@@ -240,24 +240,24 @@ private:
   SpeexResamplerState* mResampler;
 };
 
 void
 MediaDecodeTask::Decode()
 {
   MOZ_ASSERT(!NS_IsMainThread());
 
-  mBufferDecoder->BeginDecoding(mDecoderReader->TaskQueue());
+  mBufferDecoder->BeginDecoding(mDecoderReader->OwnerThread());
 
   // Tell the decoder reader that we are not going to play the data directly,
   // and that we should not reject files with more channels than the audio
  // backend supports.
   mDecoderReader->SetIgnoreAudioOutputFormat();
 
-  mDecoderReader->AsyncReadMetadata()->Then(mDecoderReader->TaskQueue(), __func__, this,
+  mDecoderReader->AsyncReadMetadata()->Then(mDecoderReader->OwnerThread(), __func__, this,
                                        &MediaDecodeTask::OnMetadataRead,
                                        &MediaDecodeTask::OnMetadataNotRead);
 }
 
 void
 MediaDecodeTask::OnMetadataRead(MetadataHolder* aMetadata)
 {
   mMediaInfo = aMetadata->mInfo;
@@ -276,17 +276,17 @@ MediaDecodeTask::OnMetadataNotRead(ReadM
   mDecoderReader->Shutdown();
   ReportFailureOnMainThread(WebAudioDecodeJob::InvalidContent);
   return;
 }
 
 void
 MediaDecodeTask::RequestSample()
 {
-  mDecoderReader->RequestAudioData()->Then(mDecoderReader->TaskQueue(), __func__, this,
+  mDecoderReader->RequestAudioData()->Then(mDecoderReader->OwnerThread(), __func__, this,
                                            &MediaDecodeTask::SampleDecoded,
                                            &MediaDecodeTask::SampleNotDecoded);
 }
 
 void
 MediaDecodeTask::SampleDecoded(AudioData* aData)
 {
   MOZ_ASSERT(!NS_IsMainThread());
@@ -499,20 +499,20 @@ AsyncDecodeWebAudio(const char* aContent
   if (!task->CreateReader()) {
     nsCOMPtr<nsIRunnable> event =
       new ReportResultTask(aDecodeJob,
                            &WebAudioDecodeJob::OnFailure,
                            WebAudioDecodeJob::UnknownError);
     NS_DispatchToMainThread(event);
   } else {
     // If we did this without a temporary:
-    //   task->Reader()->TaskQueue()->Dispatch(task.forget())
+    //   task->Reader()->OwnerThread()->Dispatch(task.forget())
     // we might evaluate the task.forget() before calling Reader(). Enforce
     // a non-crashy order-of-operations.
-    MediaTaskQueue* taskQueue = task->Reader()->TaskQueue();
+    MediaTaskQueue* taskQueue = task->Reader()->OwnerThread();
     taskQueue->Dispatch(task.forget());
   }
 }
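The comment above is about unspecified argument evaluation order: in task->Reader()->OwnerThread()->Dispatch(task.forget()), the compiler may evaluate task.forget() before task->Reader(), leaving a null task to call Reader() on. A standalone illustration with std::unique_ptr standing in for the RefPtr (Job, Dispatcher and gDispatcher are invented names):

    #include <cstdio>
    #include <memory>

    struct Dispatcher;

    struct Job {
      Dispatcher* GetDispatcher();
      int mPayload = 42;
    };

    struct Dispatcher {
      // Takes ownership of the job, like Dispatch(task.forget()) does.
      void Dispatch(std::unique_ptr<Job> aJob) {
        std::printf("dispatched job with payload %d\n", aJob->mPayload);
      }                                  // aJob destroyed here
    };

    Dispatcher gDispatcher;              // the queue outlives the job

    Dispatcher* Job::GetDispatcher() { return &gDispatcher; }

    int main() {
      auto job = std::make_unique<Job>();
      // Risky shape, mirroring the comment above:
      //   job->GetDispatcher()->Dispatch(std::move(job));
      // Before C++17 the argument may be constructed (emptying job) before
      // job->GetDispatcher() runs. Binding the dispatcher first makes the
      // order explicit, just like the taskQueue temporary above.
      Dispatcher* dispatcher = job->GetDispatcher();
      dispatcher->Dispatch(std::move(job));
      return 0;
    }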
 
 WebAudioDecodeJob::WebAudioDecodeJob(const nsACString& aContentType,
                                      AudioContext* aContext,
                                      Promise* aPromise,
                                      DecodeSuccessCallback* aSuccessCallback,