Bug 907352 - Part 5: Wiring for width/height/frameRate gUM constraints. r=mt
author        Jan-Ivar Bruaroey <jib@mozilla.com>
Fri, 18 Apr 2014 15:15:10 -0400
changeset 198943 66205061c13f7266750be6e0389796a386f24b44
parent 198942 f0edcb56a33aa4eff4f7aac6fd93ab93a33655f1
child 198944 093b21bd43f259da2d8934880e2a6e6e935f8e35
push id       3624
push user     asasaki@mozilla.com
push date     Mon, 09 Jun 2014 21:49:01 +0000
treeherder    mozilla-beta@b1a5da15899a
reviewers     mt
bugs          907352
milestone     31.0a1
content/media/webrtc/MediaEngine.h
content/media/webrtc/MediaEngineDefault.cpp
content/media/webrtc/MediaEngineDefault.h
content/media/webrtc/MediaEngineTabVideoSource.cpp
content/media/webrtc/MediaEngineTabVideoSource.h
content/media/webrtc/MediaEngineWebRTC.cpp
content/media/webrtc/MediaEngineWebRTC.h
content/media/webrtc/MediaEngineWebRTCAudio.cpp
content/media/webrtc/MediaEngineWebRTCVideo.cpp
dom/media/MediaManager.cpp
dom/media/MediaManager.h
--- a/content/media/webrtc/MediaEngine.h
+++ b/content/media/webrtc/MediaEngine.h
@@ -7,16 +7,19 @@
 
 #include "mozilla/RefPtr.h"
 #include "nsIDOMFile.h"
 #include "DOMMediaStream.h"
 #include "MediaStreamGraph.h"
 
 namespace mozilla {
 
+class VideoTrackConstraintsN;
+class AudioTrackConstraintsN;
+
 /**
  * Abstract interface for managing audio and video devices. Each platform
  * must implement a concrete class that will map these classes and methods
  * to the appropriate backend. For example, on Desktop platforms, these will
  * correspond to equivalent webrtc (GIPS) calls, and on B2G they will map to
  * a Gonk interface.
  */
 class MediaEngineVideoSource;
@@ -68,19 +71,16 @@ public:
   virtual ~MediaEngineSource() {}
 
   /* Populate the human readable name of this device in the nsAString */
   virtual void GetName(nsAString&) = 0;
 
   /* Populate the UUID of this device in the nsAString */
   virtual void GetUUID(nsAString&) = 0;
 
-  /* This call reserves but does not start the device. */
-  virtual nsresult Allocate(const MediaEnginePrefs &aPrefs) = 0;
-
   /* Release the device back to the system. */
   virtual nsresult Deallocate() = 0;
 
   /* Start the device and add the track to the provided SourceMediaStream, with
    * the provided TrackID. You may start appending data to the track
    * immediately after. */
   virtual nsresult Start(SourceMediaStream*, TrackID) = 0;
 
@@ -136,22 +136,31 @@ struct MediaEnginePrefs {
   int32_t mFPS;
   int32_t mMinFPS;
 };
 
 class MediaEngineVideoSource : public MediaEngineSource
 {
 public:
   virtual ~MediaEngineVideoSource() {}
+
+  /* This call reserves but does not start the device. */
+  virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints,
+                            const MediaEnginePrefs &aPrefs) = 0;
 };
 
 /**
  * Audio source and friends.
  */
 class MediaEngineAudioSource : public MediaEngineSource
 {
 public:
   virtual ~MediaEngineAudioSource() {}
+
+  /* This call reserves but does not start the device. */
+  virtual nsresult Allocate(const AudioTrackConstraintsN &aConstraints,
+                            const MediaEnginePrefs &aPrefs) = 0;
+
 };
 
 }
 
 #endif /* MEDIAENGINE_H_ */
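The net effect of the MediaEngine.h change: Allocate() leaves the shared MediaEngineSource interface, and each typed source now takes its own constraints object alongside the existing prefs. Below is a minimal sketch of a backend video source written against the post-patch interface; only the Allocate() signature and the kReleased/kAllocated state handling mirror this patch, while the class name and method body are illustrative.

// Sketch only: a hypothetical backend source against the new interface.
// "SketchVideoSource" and its body are illustrative, not part of this patch.
class SketchVideoSource : public MediaEngineVideoSource
{
public:
  virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints,
                            const MediaEnginePrefs &aPrefs)
  {
    if (mState != kReleased) {
      return NS_ERROR_FAILURE;
    }
    // Constraints are now available at allocation time, so a backend can
    // pick its capture capability from them before the device is started.
    mState = kAllocated;
    return NS_OK;
  }
  // Deallocate(), Start(), Stop(), Snapshot() and Config() are unchanged.
};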
--- a/content/media/webrtc/MediaEngineDefault.cpp
+++ b/content/media/webrtc/MediaEngineDefault.cpp
@@ -56,17 +56,18 @@ MediaEngineDefaultVideoSource::GetName(n
 void
 MediaEngineDefaultVideoSource::GetUUID(nsAString& aUUID)
 {
   aUUID.Assign(NS_LITERAL_STRING("1041FCBD-3F12-4F7B-9E9B-1EC556DD5676"));
   return;
 }
 
 nsresult
-MediaEngineDefaultVideoSource::Allocate(const MediaEnginePrefs &aPrefs)
+MediaEngineDefaultVideoSource::Allocate(const VideoTrackConstraintsN &aConstraints,
+                                        const MediaEnginePrefs &aPrefs)
 {
   if (mState != kReleased) {
     return NS_ERROR_FAILURE;
   }
 
   mOpts = aPrefs;
   mState = kAllocated;
   return NS_OK;
@@ -362,17 +363,18 @@ MediaEngineDefaultAudioSource::GetName(n
 void
 MediaEngineDefaultAudioSource::GetUUID(nsAString& aUUID)
 {
   aUUID.Assign(NS_LITERAL_STRING("B7CBD7C1-53EF-42F9-8353-73F61C70C092"));
   return;
 }
 
 nsresult
-MediaEngineDefaultAudioSource::Allocate(const MediaEnginePrefs &aPrefs)
+MediaEngineDefaultAudioSource::Allocate(const AudioTrackConstraintsN &aConstraints,
+                                        const MediaEnginePrefs &aPrefs)
 {
   if (mState != kReleased) {
     return NS_ERROR_FAILURE;
   }
 
   mState = kAllocated;
   // generate 1Khz sine wave
   mSineGenerator = new SineWaveGenerator(AUDIO_RATE);
--- a/content/media/webrtc/MediaEngineDefault.h
+++ b/content/media/webrtc/MediaEngineDefault.h
@@ -36,17 +36,18 @@ class MediaEngineDefaultVideoSource : pu
 {
 public:
   MediaEngineDefaultVideoSource();
   ~MediaEngineDefaultVideoSource();
 
   virtual void GetName(nsAString&);
   virtual void GetUUID(nsAString&);
 
-  virtual nsresult Allocate(const MediaEnginePrefs &aPrefs);
+  virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints,
+                            const MediaEnginePrefs &aPrefs);
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
   virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                           bool aAgcOn, uint32_t aAGC,
                           bool aNoiseOn, uint32_t aNoise,
                           int32_t aPlayoutDelay) { return NS_OK; };
@@ -89,17 +90,18 @@ class MediaEngineDefaultAudioSource : pu
 {
 public:
   MediaEngineDefaultAudioSource();
   ~MediaEngineDefaultAudioSource();
 
   virtual void GetName(nsAString&);
   virtual void GetUUID(nsAString&);
 
-  virtual nsresult Allocate(const MediaEnginePrefs &aPrefs);
+  virtual nsresult Allocate(const AudioTrackConstraintsN &aConstraints,
+                            const MediaEnginePrefs &aPrefs);
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
   virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                           bool aAgcOn, uint32_t aAGC,
                           bool aNoiseOn, uint32_t aNoise,
                           int32_t aPlayoutDelay) { return NS_OK; };
--- a/content/media/webrtc/MediaEngineTabVideoSource.cpp
+++ b/content/media/webrtc/MediaEngineTabVideoSource.cpp
@@ -114,17 +114,18 @@ MediaEngineTabVideoSource::GetName(nsASt
 
 void
 MediaEngineTabVideoSource::GetUUID(nsAString_internal& aUuid)
 {
   aUuid.Assign(NS_LITERAL_STRING("uuid"));
 }
 
 nsresult
-MediaEngineTabVideoSource::Allocate(const mozilla::MediaEnginePrefs&)
+MediaEngineTabVideoSource::Allocate(const VideoTrackConstraintsN&,
+                                    const MediaEnginePrefs&)
 {
   return NS_OK;
 }
 
 nsresult
 MediaEngineTabVideoSource::Deallocate()
 {
   return NS_OK;
--- a/content/media/webrtc/MediaEngineTabVideoSource.h
+++ b/content/media/webrtc/MediaEngineTabVideoSource.h
@@ -15,17 +15,18 @@ class MediaEngineTabVideoSource : public
   public:
     NS_DECL_THREADSAFE_ISUPPORTS
     NS_DECL_NSIDOMEVENTLISTENER
     NS_DECL_NSITIMERCALLBACK
     MediaEngineTabVideoSource();
 
     virtual void GetName(nsAString_internal&);
     virtual void GetUUID(nsAString_internal&);
-    virtual nsresult Allocate(const mozilla::MediaEnginePrefs&);
+    virtual nsresult Allocate(const VideoTrackConstraintsN &,
+                              const mozilla::MediaEnginePrefs&);
     virtual nsresult Deallocate();
     virtual nsresult Start(mozilla::SourceMediaStream*, mozilla::TrackID);
     virtual nsresult Snapshot(uint32_t, nsIDOMFile**);
     virtual void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime, mozilla::TrackTicks&);
     virtual nsresult Stop(mozilla::SourceMediaStream*, mozilla::TrackID);
     virtual nsresult Config(bool, uint32_t, bool, uint32_t, bool, uint32_t, int32_t);
     virtual bool IsFake();
     void Draw();
--- a/content/media/webrtc/MediaEngineWebRTC.cpp
+++ b/content/media/webrtc/MediaEngineWebRTC.cpp
@@ -27,16 +27,17 @@ GetUserMediaLog()
 }
 #endif
 
 #include "MediaEngineWebRTC.h"
 #include "ImageContainer.h"
 #include "nsIComponentRegistrar.h"
 #include "MediaEngineTabVideoSource.h"
 #include "nsITabSource.h"
+#include "MediaTrackConstraints.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "AndroidJNIWrapper.h"
 #include "AndroidBridge.h"
 #endif
 
 #undef LOG
 #define LOG(args) PR_LOG(GetUserMediaLog(), PR_LOG_DEBUG, args)
--- a/content/media/webrtc/MediaEngineWebRTC.h
+++ b/content/media/webrtc/MediaEngineWebRTC.h
@@ -137,17 +137,18 @@ public:
     Init();
   }
 #endif
 
   ~MediaEngineWebRTCVideoSource() { Shutdown(); }
 
   virtual void GetName(nsAString&);
   virtual void GetUUID(nsAString&);
-  virtual nsresult Allocate(const MediaEnginePrefs &aPrefs);
+  virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints,
+                            const MediaEnginePrefs &aPrefs);
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
   virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                           bool aAgcOn, uint32_t aAGC,
                           bool aNoiseOn, uint32_t aNoise,
                           int32_t aPlayoutDelay) { return NS_OK; };
@@ -242,17 +243,18 @@ private:
 
   bool mInitDone;
   bool mInSnapshotMode;
   nsString* mSnapshotPath;
 
   nsString mDeviceName;
   nsString mUniqueId;
 
-  void ChooseCapability(const MediaEnginePrefs &aPrefs);
+  void ChooseCapability(const VideoTrackConstraintsN &aConstraints,
+                        const MediaEnginePrefs &aPrefs);
 };
 
 class MediaEngineWebRTCAudioSource : public MediaEngineAudioSource,
                                      public webrtc::VoEMediaProcess
 {
 public:
   MediaEngineWebRTCAudioSource(webrtc::VoiceEngine* aVoiceEnginePtr, int aIndex,
     const char* name, const char* uuid)
@@ -275,17 +277,18 @@ public:
     mDeviceUUID.Assign(NS_ConvertUTF8toUTF16(uuid));
     Init();
   }
   ~MediaEngineWebRTCAudioSource() { Shutdown(); }
 
   virtual void GetName(nsAString&);
   virtual void GetUUID(nsAString&);
 
-  virtual nsresult Allocate(const MediaEnginePrefs &aPrefs);
+  virtual nsresult Allocate(const AudioTrackConstraintsN &aConstraints,
+                            const MediaEnginePrefs &aPrefs);
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
   virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                           bool aAgcOn, uint32_t aAGC,
                           bool aNoiseOn, uint32_t aNoise,
                           int32_t aPlayoutDelay);
--- a/content/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/content/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -1,16 +1,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaEngineWebRTC.h"
 #include <stdio.h>
 #include <algorithm>
 #include "mozilla/Assertions.h"
+#include "MediaTrackConstraints.h"
 
 // scoped_ptr.h uses FF
 #ifdef FF
 #undef FF
 #endif
 #include "webrtc/modules/audio_device/opensl/single_rw_fifo.h"
 
 #define CHANNELS 1
@@ -237,17 +238,18 @@ MediaEngineWebRTCAudioSource::Config(boo
       0 != (error = mVoEProcessing->SetNsStatus(mNoiseOn, (webrtc::NsModes) aNoise))) {
       LOG(("%s Error setting NoiseSuppression Status: %d ",__FUNCTION__, error));
     }
   }
   return NS_OK;
 }
 
 nsresult
-MediaEngineWebRTCAudioSource::Allocate(const MediaEnginePrefs &aPrefs)
+MediaEngineWebRTCAudioSource::Allocate(const AudioTrackConstraintsN &aConstraints,
+                                       const MediaEnginePrefs &aPrefs)
 {
   if (mState == kReleased) {
     if (mInitDone) {
       ScopedCustomReleasePtr<webrtc::VoEHardware> ptrVoEHw(webrtc::VoEHardware::GetInterface(mVoiceEngine));
       if (!ptrVoEHw || ptrVoEHw->SetRecordingDevice(mCapIndex)) {
         return NS_ERROR_FAILURE;
       }
       mState = kAllocated;
--- a/content/media/webrtc/MediaEngineWebRTCVideo.cpp
+++ b/content/media/webrtc/MediaEngineWebRTCVideo.cpp
@@ -4,16 +4,17 @@
 
 #include "MediaEngineWebRTC.h"
 #include "Layers.h"
 #include "ImageTypes.h"
 #include "ImageContainer.h"
 #include "mozilla/layers/GrallocTextureClient.h"
 #include "nsMemory.h"
 #include "mtransport/runnable_utils.h"
+#include "MediaTrackConstraints.h"
 
 #ifdef MOZ_B2G_CAMERA
 #include "GrallocImages.h"
 #include "libyuv.h"
 #include "mozilla/Hal.h"
 #include "ScreenOrientation.h"
 using namespace mozilla::dom;
 #endif
@@ -162,17 +163,19 @@ MediaEngineWebRTCVideoSource::NotifyPull
     // we've removed or finished the track.
     if (aSource->AppendToTrack(aID, &(segment))) {
       aLastEndTime = target;
     }
   }
 }
 
 void
-MediaEngineWebRTCVideoSource::ChooseCapability(const MediaEnginePrefs &aPrefs)
+MediaEngineWebRTCVideoSource::ChooseCapability(
+    const VideoTrackConstraintsN &aConstraints,
+    const MediaEnginePrefs &aPrefs)
 {
 #ifdef MOZ_B2G_CAMERA
   mCapability.width  = aPrefs.mWidth;
   mCapability.height = aPrefs.mHeight;
 #else
   int num = mViECapture->NumberOfCapabilities(NS_ConvertUTF16toUTF8(mUniqueId).get(),
                                               KMaxUniqueIdLength);
 
@@ -229,36 +232,37 @@ MediaEngineWebRTCVideoSource::GetName(ns
 
 void
 MediaEngineWebRTCVideoSource::GetUUID(nsAString& aUUID)
 {
   aUUID = mUniqueId;
 }
 
 nsresult
-MediaEngineWebRTCVideoSource::Allocate(const MediaEnginePrefs &aPrefs)
+MediaEngineWebRTCVideoSource::Allocate(const VideoTrackConstraintsN &aConstraints,
+                                       const MediaEnginePrefs &aPrefs)
 {
   LOG((__FUNCTION__));
 #ifdef MOZ_B2G_CAMERA
   ReentrantMonitorAutoEnter sync(mCallbackMonitor);
   if (mState == kReleased && mInitDone) {
-    ChooseCapability(aPrefs);
+    ChooseCapability(aConstraints, aPrefs);
     NS_DispatchToMainThread(WrapRunnable(this,
                                          &MediaEngineWebRTCVideoSource::AllocImpl));
     mCallbackMonitor.Wait();
     if (mState != kAllocated) {
       return NS_ERROR_FAILURE;
     }
   }
 #else
   if (mState == kReleased && mInitDone) {
     // Note: if shared, we don't allow a later opener to affect the resolution.
     // (This may change depending on spec changes for Constraints/settings)
 
-    ChooseCapability(aPrefs);
+    ChooseCapability(aConstraints, aPrefs);
 
     if (mViECapture->AllocateCaptureDevice(NS_ConvertUTF16toUTF8(mUniqueId).get(),
                                            KMaxUniqueIdLength, mCaptureIndex)) {
       return NS_ERROR_FAILURE;
     }
     mState = kAllocated;
     LOG(("Video device %d allocated", mCaptureIndex));
   } else if (mSources.IsEmpty()) {
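With aConstraints now reaching ChooseCapability(), the capability selection there can take the requested width/height/frameRate into account rather than relying on prefs alone. The matching logic itself is not part of this hunk; the sketch below only marks where such a filter would slot in. SatisfiesConstraints() is a purely hypothetical helper, and the GetCaptureCapability() iteration mirrors what the existing ChooseCapability() already does.

// Hypothetical sketch: constraint filtering inside ChooseCapability().
// SatisfiesConstraints() does not exist in this patch; it stands in for
// whatever width/height/frameRate matching is layered on top of this wiring.
webrtc::CaptureCapability cap;
for (int i = 0; i < num; i++) {
  if (mViECapture->GetCaptureCapability(NS_ConvertUTF16toUTF8(mUniqueId).get(),
                                        KMaxUniqueIdLength, i, cap) != 0) {
    continue;
  }
  if (SatisfiesConstraints(aConstraints, cap)) {
    mCapability = cap;  // real code would still weigh aPrefs among the matches
    break;
  }
}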
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -257,23 +257,33 @@ protected:
   nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener;
 };
 
 /**
  * nsIMediaDevice implementation.
  */
 NS_IMPL_ISUPPORTS1(MediaDevice, nsIMediaDevice)
 
-MediaDevice::MediaDevice(MediaEngineVideoSource* aSource)
+MediaDevice* MediaDevice::Create(MediaEngineVideoSource* source) {
+  return new VideoDevice(source);
+}
+
+MediaDevice* MediaDevice::Create(MediaEngineAudioSource* source) {
+  return new AudioDevice(source);
+}
+
+MediaDevice::MediaDevice(MediaEngineSource* aSource)
   : mHasFacingMode(false)
   , mSource(aSource) {
-  mType.Assign(NS_LITERAL_STRING("video"));
   mSource->GetName(mName);
   mSource->GetUUID(mID);
+}
 
+VideoDevice::VideoDevice(MediaEngineVideoSource* aSource)
+  : MediaDevice(aSource) {
 #ifdef MOZ_B2G_CAMERA
   if (mName.EqualsLiteral("back")) {
     mHasFacingMode = true;
     mFacingMode = dom::VideoFacingModeEnum::Environment;
   } else if (mName.EqualsLiteral("front")) {
     mHasFacingMode = true;
     mFacingMode = dom::VideoFacingModeEnum::User;
   }
@@ -281,35 +291,43 @@ MediaDevice::MediaDevice(MediaEngineVide
 
   // Kludge to test user-facing cameras on OSX.
   if (mName.Find(NS_LITERAL_STRING("Face")) != -1) {
     mHasFacingMode = true;
     mFacingMode = dom::VideoFacingModeEnum::User;
   }
 }
 
-MediaDevice::MediaDevice(MediaEngineAudioSource* aSource)
-  : mHasFacingMode(false)
-  , mSource(aSource) {
-  mType.Assign(NS_LITERAL_STRING("audio"));
-  mSource->GetName(mName);
-  mSource->GetUUID(mID);
-}
+AudioDevice::AudioDevice(MediaEngineAudioSource* aSource)
+  : MediaDevice(aSource) {}
 
 NS_IMETHODIMP
 MediaDevice::GetName(nsAString& aName)
 {
   aName.Assign(mName);
   return NS_OK;
 }
 
 NS_IMETHODIMP
 MediaDevice::GetType(nsAString& aType)
 {
-  aType.Assign(mType);
+  return NS_OK;
+}
+
+NS_IMETHODIMP
+VideoDevice::GetType(nsAString& aType)
+{
+  aType.Assign(NS_LITERAL_STRING("video"));
+  return NS_OK;
+}
+
+NS_IMETHODIMP
+AudioDevice::GetType(nsAString& aType)
+{
+  aType.Assign(NS_LITERAL_STRING("audio"));
   return NS_OK;
 }
 
 NS_IMETHODIMP
 MediaDevice::GetId(nsAString& aID)
 {
   aID.Assign(mID);
   return NS_OK;
@@ -322,20 +340,26 @@ MediaDevice::GetFacingMode(nsAString& aF
     aFacingMode.Assign(NS_ConvertUTF8toUTF16(
         dom::VideoFacingModeEnumValues::strings[uint32_t(mFacingMode)].value));
   } else {
     aFacingMode.Truncate(0);
   }
   return NS_OK;
 }
 
-MediaEngineSource*
-MediaDevice::GetSource()
+MediaEngineVideoSource*
+VideoDevice::GetSource()
 {
-  return mSource;
+  return static_cast<MediaEngineVideoSource*>(&*mSource);
+}
+
+MediaEngineAudioSource*
+AudioDevice::GetSource()
+{
+  return static_cast<MediaEngineAudioSource*>(&*mSource);
 }
 
 /**
  * A subclass that we only use to stash internal pointers to MediaStreamGraph objects
  * that need to be cleaned up.
  */
 class nsDOMUserMediaStream : public DOMLocalMediaStream
 {
@@ -710,22 +734,22 @@ static SourceSet *
       * this decision.  To disallow, we'd filter by IsAvailable() as we used
       * to.
       */
     for (uint32_t len = sources.Length(), i = 0; i < len; i++) {
 #ifdef DEBUG
       sources[i]->GetName(deviceName);
       if (media_device_name && strlen(media_device_name) > 0)  {
         if (deviceName.EqualsASCII(media_device_name)) {
-          candidateSet.AppendElement(new MediaDevice(sources[i]));
+          candidateSet.AppendElement(MediaDevice::Create(sources[i]));
           break;
         }
       } else {
 #endif
-        candidateSet.AppendElement(new MediaDevice(sources[i]));
+        candidateSet.AppendElement(MediaDevice::Create(sources[i]));
 #ifdef DEBUG
       }
 #endif
     }
   }
 
   // Apply constraints to the list of sources.
 
@@ -945,25 +969,25 @@ public:
   nsresult
   SetContraints(const MediaStreamConstraints& aConstraints)
   {
     mConstraints = aConstraints;
     return NS_OK;
   }
 
   nsresult
-  SetAudioDevice(MediaDevice* aAudioDevice)
+  SetAudioDevice(AudioDevice* aAudioDevice)
   {
     mAudioDevice = aAudioDevice;
     mDeviceChosen = true;
     return NS_OK;
   }
 
   nsresult
-  SetVideoDevice(MediaDevice* aVideoDevice)
+  SetVideoDevice(VideoDevice* aVideoDevice)
   {
     mVideoDevice = aVideoDevice;
     mDeviceChosen = true;
     return NS_OK;
   }
 
   nsresult
   SelectDevice(MediaEngine* backend)
@@ -1001,31 +1025,32 @@ public:
     return NS_OK;
   }
 
   /**
    * Allocates a video or audio device and returns a MediaStream via
    * a GetUserMediaStreamRunnable. Runs off the main thread.
    */
   void
-  ProcessGetUserMedia(MediaEngineSource* aAudioSource, MediaEngineSource* aVideoSource)
+  ProcessGetUserMedia(MediaEngineAudioSource* aAudioSource,
+                      MediaEngineVideoSource* aVideoSource)
   {
     MOZ_ASSERT(mSuccess);
     MOZ_ASSERT(mError);
     nsresult rv;
     if (aAudioSource) {
-      rv = aAudioSource->Allocate(mPrefs);
+      rv = aAudioSource->Allocate(GetInvariant(mConstraints.mAudio), mPrefs);
       if (NS_FAILED(rv)) {
         LOG(("Failed to allocate audiosource %d",rv));
         Fail(NS_LITERAL_STRING("HARDWARE_UNAVAILABLE"));
         return;
       }
     }
     if (aVideoSource) {
-      rv = aVideoSource->Allocate(mPrefs);
+      rv = aVideoSource->Allocate(GetInvariant(mConstraints.mVideo), mPrefs);
       if (NS_FAILED(rv)) {
         LOG(("Failed to allocate videosource %d\n",rv));
         if (aAudioSource) {
           aAudioSource->Deallocate();
         }
         Fail(NS_LITERAL_STRING("HARDWARE_UNAVAILABLE"));
         return;
       }
@@ -1041,21 +1066,21 @@ public:
     return;
   }
 
   /**
    * Allocates a video device, takes a snapshot and returns a DOMFile via
    * a SuccessRunnable or an error via the ErrorRunnable. Off the main thread.
    */
   void
-  ProcessGetUserMediaSnapshot(MediaEngineSource* aSource, int aDuration)
+  ProcessGetUserMediaSnapshot(MediaEngineVideoSource* aSource, int aDuration)
   {
     MOZ_ASSERT(mSuccess);
     MOZ_ASSERT(mError);
-    nsresult rv = aSource->Allocate(mPrefs);
+    nsresult rv = aSource->Allocate(GetInvariant(mConstraints.mVideo), mPrefs);
     if (NS_FAILED(rv)) {
       Fail(NS_LITERAL_STRING("HARDWARE_UNAVAILABLE"));
       return;
     }
 
     /**
      * Display picture capture UI here before calling Snapshot() - Bug 748835.
      */
@@ -1075,18 +1100,18 @@ public:
 
 private:
   MediaStreamConstraints mConstraints;
 
   nsCOMPtr<nsIDOMGetUserMediaSuccessCallback> mSuccess;
   nsCOMPtr<nsIDOMGetUserMediaErrorCallback> mError;
   uint64_t mWindowID;
   nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener;
-  nsRefPtr<MediaDevice> mAudioDevice;
-  nsRefPtr<MediaDevice> mVideoDevice;
+  nsRefPtr<AudioDevice> mAudioDevice;
+  nsRefPtr<VideoDevice> mVideoDevice;
   MediaEnginePrefs mPrefs;
 
   bool mDeviceChosen;
 
   RefPtr<MediaEngine> mBackend;
   nsRefPtr<MediaManager> mManager; // get ref to this when creating the runnable
 };
 
@@ -1765,19 +1790,19 @@ MediaManager::Observe(nsISupports* aSubj
         nsCOMPtr<nsISupports> supports;
         array->GetElementAt(i,getter_AddRefs(supports));
         nsCOMPtr<nsIMediaDevice> device(do_QueryInterface(supports));
         MOZ_ASSERT(device); // shouldn't be returning anything else...
         if (device) {
           nsString type;
           device->GetType(type);
           if (type.EqualsLiteral("video")) {
-            runnable->SetVideoDevice(static_cast<MediaDevice*>(device.get()));
+            runnable->SetVideoDevice(static_cast<VideoDevice*>(device.get()));
           } else if (type.EqualsLiteral("audio")) {
-            runnable->SetAudioDevice(static_cast<MediaDevice*>(device.get()));
+            runnable->SetAudioDevice(static_cast<AudioDevice*>(device.get()));
           } else {
             NS_WARNING("Unknown device type in getUserMedia");
           }
         }
       }
     }
 
     // Reuse the same thread to save memory.
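On the MediaManager.cpp side, the changes above do two things: devices are built through the MediaDevice::Create() overloads so each entry is a concrete VideoDevice or AudioDevice, and ProcessGetUserMedia() hands the per-kind constraint set to the typed Allocate() through the GetInvariant() helper it already uses. A condensed, illustrative view of that allocation step (failure handling and the runnable/thread plumbing are omitted):

// Condensed from ProcessGetUserMedia() above; error paths omitted.
if (aAudioSource) {
  // resolves to MediaEngineAudioSource::Allocate(AudioTrackConstraintsN, ...)
  aAudioSource->Allocate(GetInvariant(mConstraints.mAudio), mPrefs);
}
if (aVideoSource) {
  // resolves to MediaEngineVideoSource::Allocate(VideoTrackConstraintsN, ...)
  aVideoSource->Allocate(GetInvariant(mConstraints.mVideo), mPrefs);
}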
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -436,30 +436,45 @@ typedef nsTArray<nsRefPtr<GetUserMediaCa
 typedef nsClassHashtable<nsUint64HashKey, StreamListeners> WindowTable;
 
 class MediaDevice : public nsIMediaDevice
 {
 public:
   NS_DECL_THREADSAFE_ISUPPORTS
   NS_DECL_NSIMEDIADEVICE
 
-  MediaDevice(MediaEngineVideoSource* aSource);
-  MediaDevice(MediaEngineAudioSource* aSource);
-  virtual ~MediaDevice() {}
+  static MediaDevice* Create(MediaEngineVideoSource* source);
+  static MediaDevice* Create(MediaEngineAudioSource* source);
 
-  MediaEngineSource* GetSource();
-private:
+  virtual ~MediaDevice() {}
+protected:
+  MediaDevice(MediaEngineSource* aSource);
   nsString mName;
-  nsString mType;
   nsString mID;
   bool mHasFacingMode;
   dom::VideoFacingModeEnum mFacingMode;
   nsRefPtr<MediaEngineSource> mSource;
 };
 
+class VideoDevice : public MediaDevice
+{
+public:
+  VideoDevice(MediaEngineVideoSource* aSource);
+  NS_IMETHOD GetType(nsAString& aType);
+  MediaEngineVideoSource* GetSource();
+};
+
+class AudioDevice : public MediaDevice
+{
+public:
+  AudioDevice(MediaEngineAudioSource* aSource);
+  NS_IMETHOD GetType(nsAString& aType);
+  MediaEngineAudioSource* GetSource();
+};
+
 class MediaManager MOZ_FINAL : public nsIMediaManagerService,
                                public nsIObserver
 {
 public:
   static already_AddRefed<MediaManager> GetInstance();
 
   // NOTE: never Dispatch(....,NS_DISPATCH_SYNC) to the MediaManager
   // thread from the MainThread, as we NS_DISPATCH_SYNC to MainThread
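The header now encodes the audio/video split in the type system: MediaDevice keeps the shared name, ID and facing-mode state, while VideoDevice and AudioDevice supply the type string and a typed GetSource(). A small usage sketch of the hierarchy declared above, where videoSource stands in for a MediaEngineVideoSource* obtained from backend enumeration:

// Usage sketch; "videoSource" is a placeholder MediaEngineVideoSource*.
nsRefPtr<MediaDevice> dev = MediaDevice::Create(videoSource); // yields a VideoDevice

nsString type;
dev->GetType(type);  // "video" -- the subclass supplies the type string now

// A VideoDevice can only ever wrap a MediaEngineVideoSource, which is what
// makes the static_cast inside its GetSource() sound and gives callers the
// constraint-taking Allocate() without casting the source themselves.
MediaEngineVideoSource* source =
    static_cast<VideoDevice*>(dev.get())->GetSource();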