Bug 912342 - Pass in Audio/VideoDevice in place of Audio/VideoSource. r=jesup
author Jan-Ivar Bruaroey <jib@mozilla.com>
date Sat, 19 Sep 2015 00:49:07 -0400
changeset 296084 e01353044b85bde8d04c0a332365f05c8e4adf3c
parent 296083 7e6cf02b174c0e25a9bcf571e383cebbb6c9c7eb
child 296085 6124439438d464ef26de8d5970006a94403a028e
push id 5245
push user raliiev@mozilla.com
push date Thu, 29 Oct 2015 11:30:51 +0000
treeherder mozilla-beta@dac831dc1bd0
reviewers jesup
bugs 912342
milestone 43.0a1
dom/media/MediaManager.cpp
dom/media/MediaManager.h
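
The change is mechanical but pervasive: everywhere MediaManager previously passed raw MediaEngineSource pointers for audio and video, it now passes the refcounted AudioDevice/VideoDevice wrappers declared in MediaManager.h (moved above the listener class in the second file below), and dereferences them through GetSource() at the point of use. The sketch below shows that ownership shape with simplified stand-in types; it is not the Gecko code, and std::shared_ptr merely stands in for nsRefPtr.

```cpp
#include <iostream>
#include <memory>

// Stand-in for the real engine source; only Start() is sketched.
struct MediaEngineSource {
  void Start() { std::cout << "source started\n"; }
};

// Stand-in for the MediaDevice base declared in MediaManager.h below: the
// device holds a strong reference to its source and exposes it via GetSource().
class MediaDevice {
public:
  explicit MediaDevice(std::shared_ptr<MediaEngineSource> aSource)
    : mSource(std::move(aSource)) {}
  MediaEngineSource* GetSource() const { return mSource.get(); }
protected:
  std::shared_ptr<MediaEngineSource> mSource;  // nsRefPtr<MediaEngineSource> in Gecko
};

class AudioDevice : public MediaDevice {
public:
  using MediaDevice::MediaDevice;
};
class VideoDevice : public MediaDevice {
public:
  using MediaDevice::MediaDevice;
};

// Callers that previously took a MediaEngineSource* now take the wrapper and
// null-check it before reaching through to the source, as the hunks below do.
void StartIfPresent(AudioDevice* aAudioDevice) {
  if (aAudioDevice) {
    aAudioDevice->GetSource()->Start();
  }
}

int main() {
  AudioDevice audio(std::make_shared<MediaEngineSource>());
  StartIfPresent(&audio);  // prints "source started"
  StartIfPresent(nullptr); // an absent device is simply skipped
  return 0;
}
```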
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -221,27 +221,27 @@ HostHasPermission(nsIURI &docURI)
 class MediaOperationTask : public Task
 {
 public:
   // so we can send Stop without AddRef()ing from the MSG thread
   MediaOperationTask(MediaOperation aType,
     GetUserMediaCallbackMediaStreamListener* aListener,
     DOMMediaStream* aStream,
     DOMMediaStream::OnTracksAvailableCallback* aOnTracksAvailableCallback,
-    MediaEngineSource* aAudioSource,
-    MediaEngineSource* aVideoSource,
+    AudioDevice* aAudioDevice,
+    VideoDevice* aVideoDevice,
     bool aBool,
     uint64_t aWindowID,
     already_AddRefed<nsIDOMGetUserMediaErrorCallback> aError,
     const dom::MediaTrackConstraints& aConstraints = dom::MediaTrackConstraints())
     : mType(aType)
     , mStream(aStream)
     , mOnTracksAvailableCallback(aOnTracksAvailableCallback)
-    , mAudioSource(aAudioSource)
-    , mVideoSource(aVideoSource)
+    , mAudioDevice(aAudioDevice)
+    , mVideoDevice(aVideoDevice)
     , mListener(aListener)
     , mBool(aBool)
     , mWindowID(aWindowID)
     , mOnFailure(aError)
     , mConstraints(aConstraints)
   {}
 
   ~MediaOperationTask()
@@ -262,25 +262,25 @@ public:
       return;
 
     switch (mType) {
       case MEDIA_START:
         {
           NS_ASSERTION(!NS_IsMainThread(), "Never call on main thread");
           nsresult rv;
 
-          if (mAudioSource) {
-            rv = mAudioSource->Start(source, kAudioTrack);
+          if (mAudioDevice) {
+            rv = mAudioDevice->GetSource()->Start(source, kAudioTrack);
             if (NS_FAILED(rv)) {
               ReturnCallbackError(rv, "Starting audio failed");
               return;
             }
           }
-          if (mVideoSource) {
-            rv = mVideoSource->Start(source, kVideoTrack);
+          if (mVideoDevice) {
+            rv = mVideoDevice->GetSource()->Start(source, kVideoTrack);
             if (NS_FAILED(rv)) {
               ReturnCallbackError(rv, "Starting video failed");
               return;
             }
           }
           // Start() queued the tracks to be added synchronously to avoid races
           source->FinishAddTracks();
 
@@ -290,103 +290,103 @@ public:
           MM_LOG(("started all sources"));
           // Forward mOnTracksAvailableCallback to GetUserMediaNotificationEvent,
           // because mOnTracksAvailableCallback needs to be added to mStream
           // on the main thread.
           nsIRunnable *event =
             new GetUserMediaNotificationEvent(GetUserMediaNotificationEvent::STARTING,
                                               mStream.forget(),
                                               mOnTracksAvailableCallback.forget(),
-                                              mAudioSource != nullptr,
-                                              mVideoSource != nullptr,
+                                              mAudioDevice != nullptr,
+                                              mVideoDevice != nullptr,
                                               mWindowID, mOnFailure.forget());
           // event must always be released on mainthread due to the JS callbacks
           // in the TracksAvailableCallback
           NS_DispatchToMainThread(event);
         }
         break;
 
       case MEDIA_STOP:
       case MEDIA_STOP_TRACK:
         {
           NS_ASSERTION(!NS_IsMainThread(), "Never call on main thread");
-          if (mAudioSource) {
-            mAudioSource->Stop(source, kAudioTrack);
-            mAudioSource->Deallocate();
+          if (mAudioDevice) {
+            mAudioDevice->GetSource()->Stop(source, kAudioTrack);
+            mAudioDevice->GetSource()->Deallocate();
           }
-          if (mVideoSource) {
-            mVideoSource->Stop(source, kVideoTrack);
-            mVideoSource->Deallocate();
+          if (mVideoDevice) {
+            mVideoDevice->GetSource()->Stop(source, kVideoTrack);
+            mVideoDevice->GetSource()->Deallocate();
           }
           // Do this after stopping all tracks with EndTrack()
           if (mBool) {
             source->Finish();
           }
 
           nsIRunnable *event =
             new GetUserMediaNotificationEvent(mListener,
                                               mType == MEDIA_STOP ?
                                               GetUserMediaNotificationEvent::STOPPING :
                                               GetUserMediaNotificationEvent::STOPPED_TRACK,
-                                              mAudioSource != nullptr,
-                                              mVideoSource != nullptr,
+                                              mAudioDevice != nullptr,
+                                              mVideoDevice != nullptr,
                                               mWindowID);
           // event must always be released on mainthread due to the JS callbacks
           // in the TracksAvailableCallback
           NS_DispatchToMainThread(event);
         }
         break;
 
       case MEDIA_APPLYCONSTRAINTS_TRACK:
         {
           nsRefPtr<MediaManager> mgr = MediaManager::GetInstance();
 
           NS_ASSERTION(!NS_IsMainThread(), "Never call on main thread");
-          if (mAudioSource) {
-            mAudioSource->Restart(mConstraints, mgr->mPrefs);
+          if (mAudioDevice) {
+            mAudioDevice->Restart(mConstraints, mgr->mPrefs);
           }
-          if (mVideoSource) {
-            mVideoSource->Restart(mConstraints, mgr->mPrefs);
+          if (mVideoDevice) {
+            mVideoDevice->Restart(mConstraints, mgr->mPrefs);
           }
 
           // Need to dispatch something back to main to resolve promise
           // redo this with pledge?
           nsIRunnable *event =
             new GetUserMediaNotificationEvent(mListener,
                                               GetUserMediaNotificationEvent::APPLIED_CONSTRAINTS,
-                                              mAudioSource != nullptr,
-                                              mVideoSource != nullptr,
+                                              mAudioDevice != nullptr,
+                                              mVideoDevice != nullptr,
                                               mWindowID);
           // event must always be released on mainthread due to the JS callbacks
           // in the TracksAvailableCallback
           NS_DispatchToMainThread(event);
         }
         break;
 
       case MEDIA_DIRECT_LISTENERS:
         {
           NS_ASSERTION(!NS_IsMainThread(), "Never call on main thread");
-          if (mVideoSource) {
-            mVideoSource->SetDirectListeners(mBool);
+          if (mVideoDevice) {
+            mVideoDevice->GetSource()->SetDirectListeners(mBool);
           }
         }
         break;
 
       default:
         MOZ_ASSERT(false,"invalid MediaManager operation");
         break;
     }
   }
 
 private:
   MediaOperation mType;
   nsRefPtr<DOMMediaStream> mStream;
   nsAutoPtr<DOMMediaStream::OnTracksAvailableCallback> mOnTracksAvailableCallback;
-  nsRefPtr<MediaEngineSource> mAudioSource; // threadsafe
-  nsRefPtr<MediaEngineSource> mVideoSource; // threadsafe
+  nsRefPtr<AudioDevice> mAudioDevice; // threadsafe
+  nsRefPtr<VideoDevice> mVideoDevice; // threadsafe
   nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener; // threadsafe
   bool mBool;
   uint64_t mWindowID;
   nsCOMPtr<nsIDOMGetUserMediaErrorCallback> mOnFailure;
   dom::MediaTrackConstraints mConstraints;
 };
 
 /**
@@ -660,33 +660,33 @@ nsresult AudioDevice::Restart(const dom:
  * that need to be cleaned up.
  */
 class nsDOMUserMediaStream : public DOMLocalMediaStream
 {
 public:
   static already_AddRefed<nsDOMUserMediaStream>
   CreateTrackUnionStream(nsIDOMWindow* aWindow,
                          GetUserMediaCallbackMediaStreamListener* aListener,
-                         MediaEngineSource* aAudioSource,
-                         MediaEngineSource* aVideoSource,
+                         AudioDevice* aAudioDevice,
+                         VideoDevice* aVideoDevice,
                          MediaStreamGraph* aMSG)
   {
     nsRefPtr<nsDOMUserMediaStream> stream = new nsDOMUserMediaStream(aListener,
-                                                                     aAudioSource,
-                                                                     aVideoSource);
+                                                                     aAudioDevice,
+                                                                     aVideoDevice);
     stream->InitTrackUnionStream(aWindow, aMSG);
     return stream.forget();
   }
 
   nsDOMUserMediaStream(GetUserMediaCallbackMediaStreamListener* aListener,
-                       MediaEngineSource *aAudioSource,
-                       MediaEngineSource *aVideoSource) :
+                       AudioDevice *aAudioDevice,
+                       VideoDevice *aVideoDevice) :
     mListener(aListener),
-    mAudioSource(aAudioSource),
-    mVideoSource(aVideoSource),
+    mAudioDevice(aAudioDevice),
+    mVideoDevice(aVideoDevice),
     mEchoOn(true),
     mAgcOn(false),
     mNoiseOn(true),
 #ifdef MOZ_WEBRTC
     mEcho(webrtc::kEcDefault),
     mAgc(webrtc::kAgcDefault),
     mNoise(webrtc::kNsDefault),
 #else
@@ -821,32 +821,32 @@ public:
     return this;
   }
 
   virtual MediaEngineSource* GetMediaEngine(TrackID aTrackID) override
   {
     // MediaEngine supports only one video and one audio track for now, and
     // the TrackID is fixed in MediaEngine.
     if (aTrackID == kVideoTrack) {
-      return mVideoSource;
+      return mVideoDevice ? mVideoDevice->GetSource() : nullptr;
     }
     else if (aTrackID == kAudioTrack) {
-      return mAudioSource;
+      return mAudioDevice ? mAudioDevice->GetSource() : nullptr;
     }
 
     return nullptr;
   }
 
   // The actual MediaStream is a TrackUnionStream. But these resources need to be
   // explicitly destroyed too.
   nsRefPtr<SourceMediaStream> mSourceStream;
   nsRefPtr<MediaInputPort> mPort;
   nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener;
-  nsRefPtr<MediaEngineSource> mAudioSource; // so we can turn on AEC
-  nsRefPtr<MediaEngineSource> mVideoSource;
+  nsRefPtr<AudioDevice> mAudioDevice; // so we can turn on AEC
+  nsRefPtr<VideoDevice> mVideoDevice;
   bool mEchoOn;
   bool mAgcOn;
   bool mNoiseOn;
   uint32_t mEcho;
   uint32_t mAgc;
   uint32_t mNoise;
   uint32_t mPlayoutDelay;
 };
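
A side effect visible in GetMediaEngine() above: the old code could return mVideoSource/mAudioSource directly even when unset, but the wrapper must be null-checked before GetSource() is called. A self-contained sketch of that guard follows; the types and track IDs are hypothetical stand-ins, not the Gecko definitions.

```cpp
#include <cassert>
#include <cstdint>

struct MediaEngineSource {};            // stand-in engine source

struct Device {                         // stand-in for Audio/VideoDevice
  MediaEngineSource* mSource;
  MediaEngineSource* GetSource() const { return mSource; }
};

using TrackID = int32_t;
constexpr TrackID kAudioTrack = 1;      // assumed IDs, for illustration only
constexpr TrackID kVideoTrack = 2;

// Same shape as nsDOMUserMediaStream::GetMediaEngine() after the patch: hand
// back the engine source for the requested track, or nullptr when the
// corresponding device was never supplied.
MediaEngineSource* GetMediaEngine(TrackID aTrackID,
                                  const Device* aAudioDevice,
                                  const Device* aVideoDevice) {
  if (aTrackID == kVideoTrack) {
    return aVideoDevice ? aVideoDevice->GetSource() : nullptr;
  }
  if (aTrackID == kAudioTrack) {
    return aAudioDevice ? aAudioDevice->GetSource() : nullptr;
  }
  return nullptr;
}

int main() {
  MediaEngineSource camera;
  Device video{&camera};
  assert(GetMediaEngine(kVideoTrack, nullptr, &video) == &camera);
  assert(GetMediaEngine(kAudioTrack, nullptr, &video) == nullptr);  // no audio device
  return 0;
}
```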
@@ -889,21 +889,21 @@ class GetUserMediaStreamRunnable : publi
 {
 public:
   GetUserMediaStreamRunnable(
     nsCOMPtr<nsIDOMGetUserMediaSuccessCallback>& aOnSuccess,
     nsCOMPtr<nsIDOMGetUserMediaErrorCallback>& aOnFailure,
     uint64_t aWindowID,
     GetUserMediaCallbackMediaStreamListener* aListener,
     const nsCString& aOrigin,
-    MediaEngineSource* aAudioSource,
-    MediaEngineSource* aVideoSource,
+    AudioDevice* aAudioDevice,
+    VideoDevice* aVideoDevice,
     PeerIdentity* aPeerIdentity)
-    : mAudioSource(aAudioSource)
-    , mVideoSource(aVideoSource)
+    : mAudioDevice(aAudioDevice)
+    , mVideoDevice(aVideoDevice)
     , mWindowID(aWindowID)
     , mListener(aListener)
     , mOrigin(aOrigin)
     , mPeerIdentity(aPeerIdentity)
     , mManager(MediaManager::GetInstance())
   {
     mOnSuccess.swap(aOnSuccess);
     mOnFailure.swap(aOnFailure);
@@ -990,44 +990,44 @@ public:
         branch->GetBoolPref("media.getusermedia.noise_enabled", &noise_on);
         branch->GetIntPref("media.getusermedia.noise", &noise);
         branch->GetIntPref("media.getusermedia.playout_delay", &playout_delay);
       }
     }
 #endif
 
     MediaStreamGraph::GraphDriverType graphDriverType =
-      mAudioSource ? MediaStreamGraph::AUDIO_THREAD_DRIVER
+      mAudioDevice ? MediaStreamGraph::AUDIO_THREAD_DRIVER
                    : MediaStreamGraph::SYSTEM_THREAD_DRIVER;
     MediaStreamGraph* msg =
       MediaStreamGraph::GetInstance(graphDriverType,
                                     dom::AudioChannel::Normal);
 
     nsRefPtr<SourceMediaStream> stream = msg->CreateSourceStream(nullptr);
 
     nsRefPtr<DOMLocalMediaStream> domStream;
     // AudioCapture is a special case here, in the sense that we're not really
     // using the audio source and the SourceMediaStream, which act as
     // placeholders. We re-route a number of streams internally in the MSG and
     // mix them down instead.
-    if (mAudioSource &&
-        mAudioSource->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
+    if (mAudioDevice &&
+        mAudioDevice->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
       domStream = DOMLocalMediaStream::CreateAudioCaptureStream(window, msg);
       // It should be possible to pipe the capture stream to anything. CORS is
       // not a problem here, we got explicit user content.
       domStream->SetPrincipal(window->GetExtantDoc()->NodePrincipal());
       msg->RegisterCaptureStreamForWindow(
             mWindowID, domStream->GetStream()->AsProcessedStream());
       window->SetAudioCapture(true);
     } else {
       // Normal case, connect the source stream to the track union stream to
       // avoid us blocking
       nsRefPtr<nsDOMUserMediaStream> trackunion =
         nsDOMUserMediaStream::CreateTrackUnionStream(window, mListener,
-                                                     mAudioSource, mVideoSource,
+                                                     mAudioDevice, mVideoDevice,
                                                      msg);
       trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
       nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
         AllocateInputPort(stream);
       trackunion->mSourceStream = stream;
       trackunion->mPort = port.forget();
       // Log the relationship between SourceMediaStream and TrackUnion stream
       // Make sure logger starts before capture
@@ -1062,52 +1062,52 @@ public:
       }
       return NS_OK;
     }
 
     // The listener was added at the beginning in an inactive state.
     // Activate our listener. We'll call Start() on the source when we get a callback
     // that the MediaStream has started consuming. The listener is freed
     // when the page is invalidated (on navigation or close).
-    mListener->Activate(stream.forget(), mAudioSource, mVideoSource);
+    mListener->Activate(stream.forget(), mAudioDevice, mVideoDevice);
 
     // Note: includes JS callbacks; must be released on MainThread
     TracksAvailableCallback* tracksAvailableCallback =
       new TracksAvailableCallback(mManager, mOnSuccess, mWindowID, domStream);
 
     mListener->AudioConfig(aec_on, (uint32_t) aec,
                            agc_on, (uint32_t) agc,
                            noise_on, (uint32_t) noise,
                            playout_delay);
 
     // Dispatch to the media thread to ask it to start the sources,
     // because that can take a while.
     // Pass ownership of trackunion to the MediaOperationTask
     // to ensure it's kept alive until the MediaOperationTask runs (at least).
-    MediaManager::PostTask(
-      FROM_HERE, new MediaOperationTask(MEDIA_START, mListener, domStream,
-                                        tracksAvailableCallback, mAudioSource,
-                                        mVideoSource, false, mWindowID,
-                                        mOnFailure.forget()));
+    MediaManager::PostTask(FROM_HERE,
+        new MediaOperationTask(MEDIA_START, mListener, domStream,
+                               tracksAvailableCallback,
+                               mAudioDevice, mVideoDevice,
+                               false, mWindowID, mOnFailure.forget()));
     // We won't need mOnFailure now.
     mOnFailure = nullptr;
 
     if (!MediaManager::IsPrivateBrowsing(window)) {
       // Call GetOriginKey again, this time w/persist = true, to promote
       // deviceIds to persistent, in case they're not already. Fire'n'forget.
       nsRefPtr<Pledge<nsCString>> p = media::GetOriginKey(mOrigin, false, true);
     }
     return NS_OK;
   }
 
 private:
   nsCOMPtr<nsIDOMGetUserMediaSuccessCallback> mOnSuccess;
   nsCOMPtr<nsIDOMGetUserMediaErrorCallback> mOnFailure;
-  nsRefPtr<MediaEngineSource> mAudioSource;
-  nsRefPtr<MediaEngineSource> mVideoSource;
+  nsRefPtr<AudioDevice> mAudioDevice;
+  nsRefPtr<VideoDevice> mVideoDevice;
   uint64_t mWindowID;
   nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener;
   nsCString mOrigin;
   nsAutoPtr<PeerIdentity> mPeerIdentity;
   nsRefPtr<MediaManager> mManager; // get ref to this when creating the runnable
 };
 
 static bool
@@ -1403,23 +1403,20 @@ public:
         return;
       }
     }
     PeerIdentity* peerIdentity = nullptr;
     if (!mConstraints.mPeerIdentity.IsEmpty()) {
       peerIdentity = new PeerIdentity(mConstraints.mPeerIdentity);
     }
 
-    NS_DispatchToMainThread(do_AddRef(new GetUserMediaStreamRunnable(
-      mOnSuccess, mOnFailure, mWindowID, mListener, mOrigin,
-      (mAudioDevice? mAudioDevice->GetSource() : nullptr),
-      (mVideoDevice? mVideoDevice->GetSource() : nullptr),
-      peerIdentity
-    )));
-
+    NS_DispatchToMainThread(do_AddRef(
+        new GetUserMediaStreamRunnable(mOnSuccess, mOnFailure, mWindowID,
+                                       mListener, mOrigin, mAudioDevice,
+                                       mVideoDevice, peerIdentity)));
     MOZ_ASSERT(!mOnSuccess);
     MOZ_ASSERT(!mOnFailure);
   }
 
   nsresult
   Denied(const nsAString& aName,
          const nsAString& aMessage = EmptyString())
   {
@@ -3130,20 +3127,20 @@ MediaManager::IsActivelyCapturingOrHasAP
 
 void
 GetUserMediaCallbackMediaStreamListener::AudioConfig(bool aEchoOn,
               uint32_t aEcho,
               bool aAgcOn, uint32_t aAGC,
               bool aNoiseOn, uint32_t aNoise,
               int32_t aPlayoutDelay)
 {
-  if (mAudioSource) {
+  if (mAudioDevice) {
 #ifdef MOZ_WEBRTC
     MediaManager::PostTask(FROM_HERE,
-      NewRunnableMethod(mAudioSource.get(), &MediaEngineSource::Config,
+      NewRunnableMethod(mAudioDevice->GetSource(), &MediaEngineSource::Config,
                         aEchoOn, aEcho, aAgcOn, aAGC, aNoiseOn,
                         aNoise, aPlayoutDelay));
 #endif
   }
 }
 
 // Can be invoked from EITHER MainThread or MSG thread
 void
@@ -3151,38 +3148,38 @@ GetUserMediaCallbackMediaStreamListener:
 {
   // We can't take a chance on blocking here, so proxy this to another
   // thread.
   // Pass a ref to us (which is threadsafe) so it can query us for the
   // source stream info.
   MediaManager::PostTask(FROM_HERE,
     new MediaOperationTask(MEDIA_STOP,
                            this, nullptr, nullptr,
-                           mAudioSource, mVideoSource,
+                           mAudioDevice, mVideoDevice,
                            mFinished, mWindowID, nullptr));
 }
 
 // Doesn't kill audio
 // XXX refactor to combine with Invalidate()?
 void
 GetUserMediaCallbackMediaStreamListener::StopSharing()
 {
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-  if (mVideoSource && !mStopped &&
-      (mVideoSource->GetMediaSource() == dom::MediaSourceEnum::Screen ||
-       mVideoSource->GetMediaSource() == dom::MediaSourceEnum::Application ||
-       mVideoSource->GetMediaSource() == dom::MediaSourceEnum::Window)) {
+  if (mVideoDevice && !mStopped &&
+      (mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Screen ||
+       mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Application ||
+       mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Window)) {
     // Stop the whole stream if there's no audio; just the video track if we have both
     MediaManager::PostTask(FROM_HERE,
-      new MediaOperationTask(mAudioSource ? MEDIA_STOP_TRACK : MEDIA_STOP,
+      new MediaOperationTask(mAudioDevice ? MEDIA_STOP_TRACK : MEDIA_STOP,
                              this, nullptr, nullptr,
-                             nullptr, mVideoSource,
+                             nullptr, mVideoDevice,
                              mFinished, mWindowID, nullptr));
-  } else if (mAudioSource &&
-             mAudioSource->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
+  } else if (mAudioDevice &&
+             mAudioDevice->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
     nsCOMPtr<nsPIDOMWindow> window = nsGlobalWindow::GetInnerWindowWithId(mWindowID);
     MOZ_ASSERT(window);
     window->SetAudioCapture(false);
     MediaStreamGraph* graph =
       MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
                                     dom::AudioChannel::Normal);
     graph->UnregisterCaptureStreamForWindow(mWindowID);
     mStream->Destroy();
@@ -3192,48 +3189,48 @@ GetUserMediaCallbackMediaStreamListener:
 // ApplyConstraints for track
 
 void
 GetUserMediaCallbackMediaStreamListener::ApplyConstraintsToTrack(
     TrackID aID,
     bool aIsAudio,
     const MediaTrackConstraints& aConstraints)
 {
-  if (((aIsAudio && mAudioSource) ||
-       (!aIsAudio && mVideoSource)) && !mStopped)
+  if (((aIsAudio && mAudioDevice) ||
+       (!aIsAudio && mVideoDevice)) && !mStopped)
   {
     // XXX to support multiple tracks of a type in a stream, this should key off
     // the TrackID and not just the type
     MediaManager::PostTask(FROM_HERE,
       new MediaOperationTask(MEDIA_APPLYCONSTRAINTS_TRACK,
                              this, nullptr, nullptr,
-                             aIsAudio  ? mAudioSource.get() : nullptr,
-                             !aIsAudio ? mVideoSource.get() : nullptr,
+                             aIsAudio  ? mAudioDevice.get() : nullptr,
+                             !aIsAudio ? mVideoDevice.get() : nullptr,
                              mFinished, mWindowID, nullptr, aConstraints));
   } else {
     LOG(("gUM track %d applyConstraints, but we don't have type %s",
          aID, aIsAudio ? "audio" : "video"));
   }
 }
 
 // Stop backend for track
 
 void
 GetUserMediaCallbackMediaStreamListener::StopTrack(TrackID aID, bool aIsAudio)
 {
-  if (((aIsAudio && mAudioSource) ||
-       (!aIsAudio && mVideoSource)) && !mStopped)
+  if (((aIsAudio && mAudioDevice) ||
+       (!aIsAudio && mVideoDevice)) && !mStopped)
   {
     // XXX to support multiple tracks of a type in a stream, this should key off
     // the TrackID and not just the type
     MediaManager::PostTask(FROM_HERE,
       new MediaOperationTask(MEDIA_STOP_TRACK,
                              this, nullptr, nullptr,
-                             aIsAudio  ? mAudioSource.get() : nullptr,
-                             !aIsAudio ? mVideoSource.get() : nullptr,
+                             aIsAudio  ? mAudioDevice.get() : nullptr,
+                             !aIsAudio ? mVideoDevice.get() : nullptr,
                              mFinished, mWindowID, nullptr));
   } else {
     LOG(("gUM track %d ended, but we don't have type %s",
          aID, aIsAudio ? "audio" : "video"));
   }
 }
 
 // Called from the MediaStreamGraph thread
@@ -3248,17 +3245,17 @@ GetUserMediaCallbackMediaStreamListener:
 // Called from the MediaStreamGraph thread
 void
 GetUserMediaCallbackMediaStreamListener::NotifyDirectListeners(MediaStreamGraph* aGraph,
                                                                bool aHasListeners)
 {
   MediaManager::PostTask(FROM_HERE,
     new MediaOperationTask(MEDIA_DIRECT_LISTENERS,
                            this, nullptr, nullptr,
-                           mAudioSource, mVideoSource,
+                           mAudioDevice, mVideoDevice,
                            aHasListeners, mWindowID, nullptr));
 }
 
 // Called from the MediaStreamGraph thread
 // this can be in response to our own RemoveListener() (via ::Remove()), or
 // because the DOM GC'd the DOMLocalMediaStream/etc we're attached to.
 void
 GetUserMediaCallbackMediaStreamListener::NotifyRemoved(MediaStreamGraph* aGraph)
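
Throughout the .cpp changes above, the devices ride to the MediaManager thread inside MediaOperationTask, which now holds nsRefPtr<AudioDevice>/<VideoDevice> members so the underlying engine sources stay alive until the task runs. A rough standard-library sketch of that ownership idea, assuming shared_ptr in place of nsRefPtr and a plain worker thread in place of MediaManager::PostTask():

```cpp
#include <iostream>
#include <memory>
#include <thread>

// Standard-library stand-ins only: the point is that the task captures strong
// references to the device wrappers, so the engine sources stay alive until
// the work runs off the main thread.
struct MediaEngineSource {
  void Stop()       { std::cout << "stopped\n"; }
  void Deallocate() { std::cout << "deallocated\n"; }
};

struct Device {
  std::shared_ptr<MediaEngineSource> mSource;
  MediaEngineSource* GetSource() const { return mSource.get(); }
};

// Roughly what MediaOperationTask does for MEDIA_STOP: null-check each device,
// then stop and deallocate its source.
void PostStopTask(std::shared_ptr<Device> aAudioDevice,
                  std::shared_ptr<Device> aVideoDevice) {
  std::thread worker([audio = std::move(aAudioDevice),
                      video = std::move(aVideoDevice)] {
    if (audio) {
      audio->GetSource()->Stop();
      audio->GetSource()->Deallocate();
    }
    if (video) {
      video->GetSource()->Stop();
      video->GetSource()->Deallocate();
    }
  });
  worker.join();  // the real code hands the task to the media thread instead
}

int main() {
  auto audio = std::make_shared<Device>();
  audio->mSource = std::make_shared<MediaEngineSource>();
  PostStopTask(audio, nullptr);  // a missing video device is legal and skipped
  return 0;
}
```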
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -49,16 +49,75 @@ namespace dom {
 struct MediaStreamConstraints;
 struct MediaTrackConstraints;
 struct MediaTrackConstraintSet;
 } // namespace dom
 
 extern PRLogModuleInfo* GetMediaManagerLog();
 #define MM_LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
 
+class MediaDevice : public nsIMediaDevice
+{
+public:
+  NS_DECL_THREADSAFE_ISUPPORTS
+  NS_DECL_NSIMEDIADEVICE
+
+  void SetId(const nsAString& aID);
+  virtual uint32_t GetBestFitnessDistance(
+      const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets);
+protected:
+  virtual ~MediaDevice() {}
+  explicit MediaDevice(MediaEngineSource* aSource, bool aIsVideo);
+  static uint32_t FitnessDistance(nsString aN,
+    const dom::OwningStringOrStringSequenceOrConstrainDOMStringParameters& aConstraint);
+private:
+  static bool StringsContain(const dom::OwningStringOrStringSequence& aStrings,
+                             nsString aN);
+  static uint32_t FitnessDistance(nsString aN,
+      const dom::ConstrainDOMStringParameters& aParams);
+protected:
+  nsString mName;
+  nsString mID;
+  dom::MediaSourceEnum mMediaSource;
+  nsRefPtr<MediaEngineSource> mSource;
+public:
+  dom::MediaSourceEnum GetMediaSource() {
+    return mMediaSource;
+  }
+  bool mIsVideo;
+};
+
+class VideoDevice : public MediaDevice
+{
+public:
+  typedef MediaEngineVideoSource Source;
+
+  explicit VideoDevice(Source* aSource);
+  NS_IMETHOD GetType(nsAString& aType);
+  Source* GetSource();
+  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
+                    const MediaEnginePrefs &aPrefs);
+  nsresult Restart(const dom::MediaTrackConstraints &aConstraints,
+                   const MediaEnginePrefs &aPrefs);
+};
+
+class AudioDevice : public MediaDevice
+{
+public:
+  typedef MediaEngineAudioSource Source;
+
+  explicit AudioDevice(Source* aSource);
+  NS_IMETHOD GetType(nsAString& aType);
+  Source* GetSource();
+  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
+                    const MediaEnginePrefs &aPrefs);
+  nsresult Restart(const dom::MediaTrackConstraints &aConstraints,
+                   const MediaEnginePrefs &aPrefs);
+};
+
 /**
  * This class is an implementation of MediaStreamListener. This is used
  * to Start() and Stop() the underlying MediaEngineSource when MediaStreams
  * are assigned and deassigned in content.
  */
 class GetUserMediaCallbackMediaStreamListener : public MediaStreamListener
 {
 public:
@@ -75,23 +134,23 @@ public:
   ~GetUserMediaCallbackMediaStreamListener()
   {
     unused << mMediaThread;
     // It's OK to release mStream on any thread; they have thread-safe
     // refcounts.
   }
 
   void Activate(already_AddRefed<SourceMediaStream> aStream,
-    MediaEngineSource* aAudioSource,
-    MediaEngineSource* aVideoSource)
+                AudioDevice* aAudioDevice,
+                VideoDevice* aVideoDevice)
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
     mStream = aStream;
-    mAudioSource = aAudioSource;
-    mVideoSource = aVideoSource;
+    mAudioDevice = aAudioDevice;
+    mVideoDevice = aVideoDevice;
 
     mStream->AddListener(this);
   }
 
   MediaStream *Stream() // Can be used to test if Activate was called
   {
     return mStream;
   }
@@ -106,56 +165,60 @@ public:
 
   void StopSharing();
 
   void StopTrack(TrackID aID, bool aIsAudio);
 
   void ApplyConstraintsToTrack(TrackID aID, bool aIsAudio,
                                const dom::MediaTrackConstraints& aConstraints);
 
-  // mVideo/AudioSource are set by Activate(), so we assume they're capturing
+  // mVideo/AudioDevice are set by Activate(), so we assume they're capturing
   // if set and represent a real capture device.
   bool CapturingVideo()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-    return mVideoSource && !mStopped &&
-           mVideoSource->GetMediaSource() == dom::MediaSourceEnum::Camera &&
-           (!mVideoSource->IsFake() ||
+    return mVideoDevice && !mStopped &&
+           mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Camera &&
+           (!mVideoDevice->GetSource()->IsFake() ||
             Preferences::GetBool("media.navigator.permission.fake"));
   }
   bool CapturingAudio()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-    return mAudioSource && !mStopped &&
-           (!mAudioSource->IsFake() ||
+    return mAudioDevice && !mStopped &&
+           (!mAudioDevice->GetSource()->IsFake() ||
             Preferences::GetBool("media.navigator.permission.fake"));
   }
   bool CapturingScreen()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-    return mVideoSource && !mStopped && !mVideoSource->IsAvailable() &&
-           mVideoSource->GetMediaSource() == dom::MediaSourceEnum::Screen;
+    return mVideoDevice && !mStopped &&
+           !mVideoDevice->GetSource()->IsAvailable() &&
+           mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Screen;
   }
   bool CapturingWindow()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-    return mVideoSource && !mStopped && !mVideoSource->IsAvailable() &&
-           mVideoSource->GetMediaSource() == dom::MediaSourceEnum::Window;
+    return mVideoDevice && !mStopped &&
+           !mVideoDevice->GetSource()->IsAvailable() &&
+           mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Window;
   }
   bool CapturingApplication()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-    return mVideoSource && !mStopped && !mVideoSource->IsAvailable() &&
-           mVideoSource->GetMediaSource() == dom::MediaSourceEnum::Application;
+    return mVideoDevice && !mStopped &&
+           !mVideoDevice->GetSource()->IsAvailable() &&
+           mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Application;
   }
   bool CapturingBrowser()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-    return mVideoSource && !mStopped && mVideoSource->IsAvailable() &&
-           mVideoSource->GetMediaSource() == dom::MediaSourceEnum::Browser;
+    return mVideoDevice && !mStopped &&
+           mVideoDevice->GetSource()->IsAvailable() &&
+           mVideoDevice->GetMediaSource() == dom::MediaSourceEnum::Browser;
   }
 
   void SetStopped()
   {
     mStopped = true;
   }
 
   // implement in .cpp to avoid circular dependency with MediaOperationTask
@@ -186,21 +249,23 @@ public:
   }
 
   // Proxy NotifyPull() to sources
   virtual void
   NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override
   {
     // Currently audio sources ignore NotifyPull, but they could
     // watch it especially for fake audio.
-    if (mAudioSource) {
-      mAudioSource->NotifyPull(aGraph, mStream, kAudioTrack, aDesiredTime);
+    if (mAudioDevice) {
+      mAudioDevice->GetSource()->NotifyPull(aGraph, mStream, kAudioTrack,
+                                            aDesiredTime);
     }
-    if (mVideoSource) {
-      mVideoSource->NotifyPull(aGraph, mStream, kVideoTrack, aDesiredTime);
+    if (mVideoDevice) {
+      mVideoDevice->GetSource()->NotifyPull(aGraph, mStream, kVideoTrack,
+                                            aDesiredTime);
     }
   }
 
   virtual void
   NotifyEvent(MediaStreamGraph* aGraph,
               MediaStreamListener::MediaStreamGraphEvent aEvent) override
   {
     switch (aEvent) {
@@ -236,18 +301,18 @@ private:
   uint64_t mWindowID;
 
   bool mStopped; // MainThread only
 
   // Set at Activate on MainThread
 
   // Accessed from MediaStreamGraph thread, MediaManager thread, and MainThread
   // No locking needed as they're only addrefed except on the MediaManager thread
-  nsRefPtr<MediaEngineSource> mAudioSource; // threadsafe refcnt
-  nsRefPtr<MediaEngineSource> mVideoSource; // threadsafe refcnt
+  nsRefPtr<AudioDevice> mAudioDevice; // threadsafe refcnt
+  nsRefPtr<VideoDevice> mVideoDevice; // threadsafe refcnt
   nsRefPtr<SourceMediaStream> mStream; // threadsafe refcnt
   bool mFinished;
 
   // Accessed from MainThread and MSG thread
   Mutex mLock; // protects mRemoved access from MainThread
   bool mRemoved;
 };
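
The Capturing*() predicates above show the intended division of labour: the listener branches on the device's cached kind via GetMediaSource(), and only dereferences GetSource() for per-source queries such as IsFake() or IsAvailable(). A minimal sketch of one such predicate, with simplified hypothetical types and the permission-pref branch omitted:

```cpp
#include <cassert>

enum class MediaSourceEnum { Camera, Screen, Window, Application, Browser };

struct EngineVideoSource {
  bool mFake;
  bool IsFake() const { return mFake; }
};

struct VideoDevice {
  MediaSourceEnum mMediaSource;
  EngineVideoSource* mSource;
  MediaSourceEnum GetMediaSource() const { return mMediaSource; }
  EngineVideoSource* GetSource() const { return mSource; }
};

// Mirrors CapturingVideo(): capturing if a video device is set, capture has
// not been stopped, the device is a camera, and the source is not a fake/test
// source (the "media.navigator.permission.fake" pref branch is omitted here).
bool CapturingVideo(const VideoDevice* aDevice, bool aStopped) {
  return aDevice && !aStopped &&
         aDevice->GetMediaSource() == MediaSourceEnum::Camera &&
         !aDevice->GetSource()->IsFake();
}

int main() {
  EngineVideoSource realCamera{/*mFake=*/false};
  VideoDevice device{MediaSourceEnum::Camera, &realCamera};
  assert(CapturingVideo(&device, /*aStopped=*/false));
  assert(!CapturingVideo(nullptr, /*aStopped=*/false));  // no device: not capturing
  return 0;
}
```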
 
@@ -314,72 +379,16 @@ public:
 private:
   nsRefPtr<DOMMediaStream> mStream;
   nsAutoPtr<DOMMediaStream::OnTracksAvailableCallback> mOnTracksAvailableCallback;
 };
 
 typedef nsTArray<nsRefPtr<GetUserMediaCallbackMediaStreamListener> > StreamListeners;
 typedef nsClassHashtable<nsUint64HashKey, StreamListeners> WindowTable;
 
-class MediaDevice : public nsIMediaDevice
-{
-public:
-  NS_DECL_THREADSAFE_ISUPPORTS
-  NS_DECL_NSIMEDIADEVICE
-
-  void SetId(const nsAString& aID);
-  virtual uint32_t GetBestFitnessDistance(
-      const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets);
-protected:
-  virtual ~MediaDevice() {}
-  explicit MediaDevice(MediaEngineSource* aSource, bool aIsVideo);
-  static uint32_t FitnessDistance(nsString aN,
-    const dom::OwningStringOrStringSequenceOrConstrainDOMStringParameters& aConstraint);
-private:
-  static bool StringsContain(const dom::OwningStringOrStringSequence& aStrings,
-                             nsString aN);
-  static uint32_t FitnessDistance(nsString aN,
-      const dom::ConstrainDOMStringParameters& aParams);
-protected:
-  nsString mName;
-  nsString mID;
-  dom::MediaSourceEnum mMediaSource;
-  nsRefPtr<MediaEngineSource> mSource;
-public:
-  bool mIsVideo;
-};
-
-class VideoDevice : public MediaDevice
-{
-public:
-  typedef MediaEngineVideoSource Source;
-
-  explicit VideoDevice(Source* aSource);
-  NS_IMETHOD GetType(nsAString& aType);
-  Source* GetSource();
-  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
-                    const MediaEnginePrefs &aPrefs);
-  nsresult Restart(const dom::MediaTrackConstraints &aConstraints,
-                   const MediaEnginePrefs &aPrefs);
-};
-
-class AudioDevice : public MediaDevice
-{
-public:
-  typedef MediaEngineAudioSource Source;
-
-  explicit AudioDevice(Source* aSource);
-  NS_IMETHOD GetType(nsAString& aType);
-  Source* GetSource();
-  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
-                    const MediaEnginePrefs &aPrefs);
-  nsresult Restart(const dom::MediaTrackConstraints &aConstraints,
-                   const MediaEnginePrefs &aPrefs);
-};
-
 // we could add MediaManager if needed
 typedef void (*WindowListenerCallback)(MediaManager *aThis,
                                        uint64_t aWindowID,
                                        StreamListeners *aListeners,
                                        void *aData);
 
 class MediaManager final : public nsIMediaManagerService,
                            public nsIObserver
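
Finally, the header hunks move MediaDevice, VideoDevice and AudioDevice above GetUserMediaCallbackMediaStreamListener so the listener can hold them, and each subclass keeps a typedef'd Source plus a typed GetSource(), so code holding a VideoDevice gets a MediaEngineVideoSource* back without casting at call sites. A hedged sketch of that shape with stand-in classes (FrameRate() is invented purely for illustration):

```cpp
#include <cassert>

// Simplified stand-ins, not the Gecko classes. The shape mirrors the header
// above: the MediaDevice base stores the source as MediaEngineSource, while
// VideoDevice typedefs its concrete Source and returns it from GetSource().
struct MediaEngineSource {
  virtual ~MediaEngineSource() = default;
};

struct MediaEngineVideoSource : MediaEngineSource {
  int FrameRate() const { return 30; }  // hypothetical video-only query
};

class MediaDevice {
public:
  explicit MediaDevice(MediaEngineSource* aSource) : mSource(aSource) {}
protected:
  MediaEngineSource* mSource;  // nsRefPtr<MediaEngineSource> in the real header
};

class VideoDevice : public MediaDevice {
public:
  typedef MediaEngineVideoSource Source;
  explicit VideoDevice(Source* aSource) : MediaDevice(aSource) {}
  // The base stores a MediaEngineSource*, but a VideoDevice is only ever
  // constructed from a video source, so the downcast is safe here.
  Source* GetSource() const { return static_cast<Source*>(mSource); }
};

int main() {
  MediaEngineVideoSource camera;
  VideoDevice device(&camera);
  assert(device.GetSource()->FrameRate() == 30);  // no manual casting needed
  return 0;
}
```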