author | Randell Jesup <rjesup@jesup.org> |
Thu, 17 Jul 2014 22:23:00 -0400 | |
changeset 194818 | d00f111241a87b538634ba8f1a91e35b6d7f5419 |
parent 194817 | 2b3118f75b54bf6318ef5594f0a0772803ebeee5 |
child 194819 | 727bd16b29d3bdcb44f6469575841c51f05d18d7 |
push id | 27157 |
push user | ryanvm@gmail.com |
push date | Fri, 18 Jul 2014 19:00:26 +0000 |
treeherder | mozilla-central@ecdb409898a6 [default view] [failures only] |
perfherder | [talos] [build metrics] [platform microbench] (compared to previous push) |
reviewers | pkerr, gcp |
bugs | 1039529 |
milestone | 33.0a1 |
first release with | nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
|
last release without | nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
|
--- a/content/media/webrtc/MediaEngine.h +++ b/content/media/webrtc/MediaEngine.h @@ -35,37 +35,47 @@ enum MediaEngineState { }; // We only support 1 audio and 1 video track for now. enum { kVideoTrack = 1, kAudioTrack = 2 }; +// includes everything from dom::MediaSourceEnum (really video sources), plus audio sources +enum MediaSourceType { + Camera = (int) dom::MediaSourceEnum::Camera, + Screen = (int) dom::MediaSourceEnum::Screen, + Application = (int) dom::MediaSourceEnum::Application, + Window, // = (int) dom::MediaSourceEnum::Window, // XXX bug 1038926 + //Browser = (int) dom::MediaSourceEnum::Browser, // proposed in WG, unclear if it's useful + Microphone +}; + class MediaEngine { public: NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEngine) static const int DEFAULT_VIDEO_FPS = 30; static const int DEFAULT_VIDEO_MIN_FPS = 10; static const int DEFAULT_43_VIDEO_WIDTH = 640; static const int DEFAULT_43_VIDEO_HEIGHT = 480; static const int DEFAULT_169_VIDEO_WIDTH = 1280; static const int DEFAULT_169_VIDEO_HEIGHT = 720; static const int DEFAULT_AUDIO_TIMER_MS = 10; /* Populate an array of video sources in the nsTArray. Also include devices * that are currently unavailable. */ - virtual void EnumerateVideoDevices(dom::MediaSourceEnum, + virtual void EnumerateVideoDevices(MediaSourceType, nsTArray<nsRefPtr<MediaEngineVideoSource> >*) = 0; /* Populate an array of audio sources in the nsTArray. Also include devices * that are currently unavailable. */ - virtual void EnumerateAudioDevices(dom::MediaSourceEnum, + virtual void EnumerateAudioDevices(MediaSourceType, nsTArray<nsRefPtr<MediaEngineAudioSource> >*) = 0; protected: virtual ~MediaEngine() {} }; /** * Common abstract base class for audio and video sources. 
@@ -114,16 +124,19 @@ public: bool aNoiseOn, uint32_t aNoise, int32_t aPlayoutDelay) = 0; /* Returns true if a source represents a fake capture device and * false otherwise */ virtual bool IsFake() = 0; + /* Returns the type of media source (camera, microphone, screen, window, etc) */ + virtual const MediaSourceType GetMediaSource() = 0; + /* Return false if device is currently allocated or started */ bool IsAvailable() { if (mState == kAllocated || mState == kStarted) { return false; } else { return true; } } @@ -180,18 +193,18 @@ private: } }; class MediaEngineVideoSource : public MediaEngineSource { public: virtual ~MediaEngineVideoSource() {} - virtual const dom::MediaSourceEnum GetMediaSource() { - return dom::MediaSourceEnum::Camera; + virtual const MediaSourceType GetMediaSource() { + return MediaSourceType::Camera; } /* This call reserves but does not start the device. */ virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints, const MediaEnginePrefs &aPrefs) = 0; }; /** * Audio source and friends.
--- a/content/media/webrtc/MediaEngineDefault.cpp +++ b/content/media/webrtc/MediaEngineDefault.cpp @@ -473,37 +473,37 @@ MediaEngineDefaultAudioSource::Notify(ns channels.AppendElement(dest); segment.AppendFrames(buffer.forget(), channels, AUDIO_FRAME_LENGTH); mSource->AppendToTrack(mTrackID, &segment); return NS_OK; } void -MediaEngineDefault::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource, +MediaEngineDefault::EnumerateVideoDevices(MediaSourceType aMediaSource, nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources) { MutexAutoLock lock(mMutex); // only supports camera sources (for now). See Bug 1038241 - if (aMediaSource != dom::MediaSourceEnum::Camera) { + if (aMediaSource != MediaSourceType::Camera) { return; } // We once had code here to find a VideoSource with the same settings and re-use that. // This no longer is possible since the resolution is being set in Allocate(). nsRefPtr<MediaEngineVideoSource> newSource = new MediaEngineDefaultVideoSource(); mVSources.AppendElement(newSource); aVSources->AppendElement(newSource); return; } void -MediaEngineDefault::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource, +MediaEngineDefault::EnumerateAudioDevices(MediaSourceType aMediaSource, nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources) { MutexAutoLock lock(mMutex); int32_t len = mASources.Length(); // aMediaSource is ignored for audio devices (for now). for (int32_t i = 0; i < len; i++) { nsRefPtr<MediaEngineAudioSource> source = mASources.ElementAt(i);
--- a/content/media/webrtc/MediaEngineDefault.h +++ b/content/media/webrtc/MediaEngineDefault.h @@ -56,16 +56,20 @@ public: TrackID aId, StreamTime aDesiredTime, TrackTicks &aLastEndTime); virtual bool IsFake() { return true; } + virtual const MediaSourceType GetMediaSource() { + return MediaSourceType::Camera; + } + NS_DECL_THREADSAFE_ISUPPORTS NS_DECL_NSITIMERCALLBACK protected: ~MediaEngineDefaultVideoSource(); friend class MediaEngineDefault; @@ -112,16 +116,20 @@ public: TrackID aId, StreamTime aDesiredTime, TrackTicks &aLastEndTime) {} virtual bool IsFake() { return true; } + virtual const MediaSourceType GetMediaSource() { + return MediaSourceType::Microphone; + } + NS_DECL_THREADSAFE_ISUPPORTS NS_DECL_NSITIMERCALLBACK protected: ~MediaEngineDefaultAudioSource(); TrackID mTrackID; nsCOMPtr<nsITimer> mTimer; @@ -133,19 +141,19 @@ protected: class MediaEngineDefault : public MediaEngine { public: MediaEngineDefault() : mMutex("mozilla::MediaEngineDefault") {} - virtual void EnumerateVideoDevices(dom::MediaSourceEnum, + virtual void EnumerateVideoDevices(MediaSourceType, nsTArray<nsRefPtr<MediaEngineVideoSource> >*); - virtual void EnumerateAudioDevices(dom::MediaSourceEnum, + virtual void EnumerateAudioDevices(MediaSourceType, nsTArray<nsRefPtr<MediaEngineAudioSource> >*); private: ~MediaEngineDefault() {} Mutex mMutex; // protected with mMutex:
--- a/content/media/webrtc/MediaEngineWebRTC.cpp +++ b/content/media/webrtc/MediaEngineWebRTC.cpp @@ -68,24 +68,24 @@ MediaEngineWebRTC::MediaEngineWebRTC(Med // XXX gFarendObserver = new AudioOutputObserver(); NS_NewNamedThread("AudioGUM", getter_AddRefs(mThread)); MOZ_ASSERT(mThread); } void -MediaEngineWebRTC::EnumerateVideoDevices(dom::MediaSourceEnum aMediaSource, +MediaEngineWebRTC::EnumerateVideoDevices(MediaSourceType aMediaSource, nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources) { // We spawn threads to handle gUM runnables, so we must protect the member vars MutexAutoLock lock(mMutex); #ifdef MOZ_B2G_CAMERA - if (aMediaSource != dom::MediaSourceEnum::Camera) { + if (aMediaSource != MediaSourceType::Camera) { // only supports camera sources return; } /** * We still enumerate every time, in case a new device was plugged in since * the last call. TODO: Verify that WebRTC actually does deal with hotplugging * new devices (with or without new engine creation) and accordingly adjust. 
@@ -133,50 +133,50 @@ MediaEngineWebRTC::EnumerateVideoDevices if (webrtc::VideoEngine::SetAndroidObjects(jvm) != 0) { LOG(("VieCapture:SetAndroidObjects Failed")); return; } #endif switch (aMediaSource) { - case dom::MediaSourceEnum::Window: + case MediaSourceType::Window: mWinEngineConfig.Set<webrtc::CaptureDeviceInfo>( new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Window)); if (!mWinEngine) { if (!(mWinEngine = webrtc::VideoEngine::Create(mWinEngineConfig))) { return; } } videoEngine = mWinEngine; videoEngineInit = &mWinEngineInit; break; - case dom::MediaSourceEnum::Application: + case MediaSourceType::Application: mAppEngineConfig.Set<webrtc::CaptureDeviceInfo>( new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Application)); if (!mAppEngine) { if (!(mAppEngine = webrtc::VideoEngine::Create(mAppEngineConfig))) { return; } } videoEngine = mAppEngine; videoEngineInit = &mAppEngineInit; break; - case dom::MediaSourceEnum::Screen: + case MediaSourceType::Screen: mScreenEngineConfig.Set<webrtc::CaptureDeviceInfo>( new webrtc::CaptureDeviceInfo(webrtc::CaptureDeviceType::Screen)); if (!mScreenEngine) { if (!(mScreenEngine = webrtc::VideoEngine::Create(mScreenEngineConfig))) { return; } } videoEngine = mScreenEngine; videoEngineInit = &mScreenEngineInit; break; - case dom::MediaSourceEnum::Camera: + case MediaSourceType::Camera: // fall through default: if (!mVideoEngine) { if (!(mVideoEngine = webrtc::VideoEngine::Create())) { return; } } videoEngine = mVideoEngine; @@ -266,17 +266,17 @@ MediaEngineWebRTC::EnumerateVideoDevices if (mHasTabVideoSource) aVSources->AppendElement(new MediaEngineTabVideoSource()); return; #endif } void -MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource, +MediaEngineWebRTC::EnumerateAudioDevices(MediaSourceType aMediaSource, nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources) { ScopedCustomReleasePtr<webrtc::VoEBase> ptrVoEBase; ScopedCustomReleasePtr<webrtc::VoEHardware> ptrVoEHw; // We 
spawn threads to handle gUM runnables, so we must protect the member vars MutexAutoLock lock(mMutex); #ifdef MOZ_WIDGET_ANDROID
--- a/content/media/webrtc/MediaEngineWebRTC.h +++ b/content/media/webrtc/MediaEngineWebRTC.h @@ -91,17 +91,17 @@ class MediaEngineWebRTCVideoSource : pub , public mozilla::hal::ScreenConfigurationObserver #else , public webrtc::ExternalRenderer #endif { public: #ifdef MOZ_B2G_CAMERA MediaEngineWebRTCVideoSource(int aIndex, - dom::MediaSourceEnum aMediaSource = dom::MediaSourceEnum::Camera) + MediaSourceType aMediaSource = MediaSourceType::Camera) : mCameraControl(nullptr) , mCallbackMonitor("WebRTCCamera.CallbackMonitor") , mRotation(0) , mBackCamera(false) , mCaptureIndex(aIndex) , mMediaSource(aMediaSource) , mMonitor("WebRTCCamera.Monitor") , mWidth(0) @@ -122,17 +122,17 @@ public: /** * Does DeliverFrame() support a null buffer and non-null handle * (video texture)? * XXX Investigate! Especially for Android/B2G */ virtual bool IsTextureSupported() { return false; } MediaEngineWebRTCVideoSource(webrtc::VideoEngine* aVideoEnginePtr, int aIndex, - dom::MediaSourceEnum aMediaSource = dom::MediaSourceEnum::Camera) + MediaSourceType aMediaSource = MediaSourceType::Camera) : mVideoEngine(aVideoEnginePtr) , mCaptureIndex(aIndex) , mFps(-1) , mMinFps(-1) , mMediaSource(aMediaSource) , mMonitor("WebRTCCamera.Monitor") , mWidth(0) , mHeight(0) @@ -164,17 +164,17 @@ public: TrackID aId, StreamTime aDesiredTime, TrackTicks &aLastEndTime); virtual bool IsFake() { return false; } - virtual const dom::MediaSourceEnum GetMediaSource() { + virtual const MediaSourceType GetMediaSource() { return mMediaSource; } #ifndef MOZ_B2G_CAMERA NS_DECL_THREADSAFE_ISUPPORTS #else // We are subclassed from CameraControlListener, which implements a // threadsafe reference-count for us. @@ -242,17 +242,17 @@ private: webrtc::ViECapture* mViECapture; webrtc::ViERender* mViERender; #endif webrtc::CaptureCapability mCapability; // Doesn't work on OS X. 
int mCaptureIndex; int mFps; // Track rate (30 fps by default) int mMinFps; // Min rate we want to accept - dom::MediaSourceEnum mMediaSource; // source of media (camera | application | screen) + MediaSourceType mMediaSource; // source of media (camera | application | screen) // mMonitor protects mImage access/changes, and transitions of mState // from kStarted to kStopped (which are combined with EndTrack() and // image changes). Note that mSources is not accessed from other threads // for video and is not protected. Monitor mMonitor; // Monitor for processing WebRTC frames. int mWidth, mHeight; nsRefPtr<layers::Image> mImage; @@ -322,16 +322,20 @@ public: TrackID aId, StreamTime aDesiredTime, TrackTicks &aLastEndTime); virtual bool IsFake() { return false; } + virtual const MediaSourceType GetMediaSource() { + return MediaSourceType::Microphone; + } + // VoEMediaProcess. void Process(int channel, webrtc::ProcessingTypes type, int16_t audio10ms[], int length, int samplingFreq, bool isStereo); NS_DECL_THREADSAFE_ISUPPORTS protected: @@ -385,19 +389,19 @@ class MediaEngineWebRTC : public MediaEn { public: MediaEngineWebRTC(MediaEnginePrefs &aPrefs); // Clients should ensure to clean-up sources video/audio sources // before invoking Shutdown on this class. void Shutdown(); - virtual void EnumerateVideoDevices(dom::MediaSourceEnum, + virtual void EnumerateVideoDevices(MediaSourceType, nsTArray<nsRefPtr<MediaEngineVideoSource> >*); - virtual void EnumerateAudioDevices(dom::MediaSourceEnum, + virtual void EnumerateAudioDevices(MediaSourceType, nsTArray<nsRefPtr<MediaEngineAudioSource> >*); private: ~MediaEngineWebRTC() { Shutdown(); #ifdef MOZ_B2G_CAMERA AsyncLatencyLogger::Get()->Release(); #endif // XXX
--- a/content/media/webrtc/MediaTrackConstraints.h +++ b/content/media/webrtc/MediaTrackConstraints.h @@ -41,16 +41,18 @@ public: mUnsupportedRequirement = true; } } } // treat MediaSource special because it's always required mRequired.mMediaSource = mMediaSource; + // we guarantee (int) equivalence from MediaSourceEnum -> MediaSourceType + // (but not the other way) if (mMediaSource != dom::MediaSourceEnum::Camera && mAdvanced.WasPassed()) { // iterate through advanced, forcing mediaSource to match "root" auto& array = mAdvanced.Value(); for (uint32_t i = 0; i < array.Length(); i++) { if (array[i].mMediaSource == dom::MediaSourceEnum::Camera) { array[i].mMediaSource = mMediaSource; } }
--- a/dom/media/MediaManager.cpp +++ b/dom/media/MediaManager.cpp @@ -306,17 +306,16 @@ VideoDevice::VideoDevice(MediaEngineVide #endif // MOZ_B2G_CAMERA // Kludge to test user-facing cameras on OSX. if (mName.Find(NS_LITERAL_STRING("Face")) != -1) { mHasFacingMode = true; mFacingMode = dom::VideoFacingModeEnum::User; } - // dom::MediaSourceEnum::Camera; mMediaSource = aSource->GetMediaSource(); } AudioDevice::AudioDevice(MediaEngineAudioSource* aSource) : MediaDevice(aSource) {} NS_IMETHODIMP MediaDevice::GetName(nsAString& aName) @@ -362,19 +361,24 @@ MediaDevice::GetFacingMode(nsAString& aF aFacingMode.Truncate(0); } return NS_OK; } NS_IMETHODIMP MediaDevice::GetMediaSource(nsAString& aMediaSource) { - - aMediaSource.Assign(NS_ConvertUTF8toUTF16( - dom::MediaSourceEnumValues::strings[uint32_t(mMediaSource)].value)); + if (mMediaSource == MediaSourceType::Microphone) { + aMediaSource.Assign(NS_LITERAL_STRING("microphone")); + } else if (mMediaSource == MediaSourceType::Window) { // this will go away + aMediaSource.Assign(NS_LITERAL_STRING("window")); + } else { // all the rest are shared + aMediaSource.Assign(NS_ConvertUTF8toUTF16( + dom::MediaSourceEnumValues::strings[uint32_t(mMediaSource)].value)); + } return NS_OK; } MediaEngineVideoSource* VideoDevice::GetSource() { return static_cast<MediaEngineVideoSource*>(&*mSource); } @@ -754,28 +758,29 @@ static bool SatisfyConstraintSet(const M typedef nsTArray<nsCOMPtr<nsIMediaDevice> > SourceSet; // Source getter that constrains list returned template<class SourceType, class ConstraintsType> static SourceSet * GetSources(MediaEngine *engine, ConstraintsType &aConstraints, - void (MediaEngine::* aEnumerate)(dom::MediaSourceEnum, nsTArray<nsRefPtr<SourceType> >*), + void (MediaEngine::* aEnumerate)(MediaSourceType, nsTArray<nsRefPtr<SourceType> >*), const char* media_device_name = nullptr) { ScopedDeletePtr<SourceSet> result(new SourceSet); const SourceType * const type = nullptr; nsString deviceName; // First collect 
sources SourceSet candidateSet; { nsTArray<nsRefPtr<SourceType> > sources; - (engine->*aEnumerate)(aConstraints.mMediaSource, &sources); + // all MediaSourceEnums are contained in MediaSourceType + (engine->*aEnumerate)((MediaSourceType)((int)aConstraints.mMediaSource), &sources); /** * We're allowing multiple tabs to access the same camera for parity * with Chrome. See bug 811757 for some of the issues surrounding * this decision. To disallow, we'd filter by IsAvailable() as we used * to. */ for (uint32_t len = sources.Length(), i = 0; i < len; i++) { sources[i]->GetName(deviceName); @@ -1909,17 +1914,18 @@ WindowsHashToArrayFunc (const uint64_t& // access and windows that are currently capturing media. We want // to return only the latter. See bug 975177. bool capturing = false; if (aData) { uint32_t length = aData->Length(); for (uint32_t i = 0; i < length; ++i) { nsRefPtr<GetUserMediaCallbackMediaStreamListener> listener = aData->ElementAt(i); - if (listener->CapturingVideo() || listener->CapturingAudio()) { + if (listener->CapturingVideo() || listener->CapturingAudio() || + listener->CapturingScreen() || listener->CapturingWindow()) { capturing = true; break; } } } if (capturing) array->AppendElement(window); @@ -1940,34 +1946,39 @@ MediaManager::GetActiveMediaCaptureWindo mActiveWindows.EnumerateRead(WindowsHashToArrayFunc, array); *aArray = array; return NS_OK; } NS_IMETHODIMP MediaManager::MediaCaptureWindowState(nsIDOMWindow* aWindow, bool* aVideo, - bool* aAudio) + bool* aAudio, bool *aScreenShare, + bool* aWindowShare) { NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); *aVideo = false; *aAudio = false; + *aScreenShare = false; + *aWindowShare = false; - nsresult rv = MediaCaptureWindowStateInternal(aWindow, aVideo, aAudio); + nsresult rv = MediaCaptureWindowStateInternal(aWindow, aVideo, aAudio, aScreenShare, aWindowShare); #ifdef DEBUG nsCOMPtr<nsPIDOMWindow> piWin = do_QueryInterface(aWindow); - LOG(("%s: window %lld capturing %s %s", 
__FUNCTION__, piWin ? piWin->WindowID() : -1, - *aVideo ? "video" : "", *aAudio ? "audio" : "")); + LOG(("%s: window %lld capturing %s %s %s %s", __FUNCTION__, piWin ? piWin->WindowID() : -1, + *aVideo ? "video" : "", *aAudio ? "audio" : "", + *aScreenShare ? "screenshare" : "", *aWindowShare ? "windowshare" : "")); #endif return rv; } nsresult MediaManager::MediaCaptureWindowStateInternal(nsIDOMWindow* aWindow, bool* aVideo, - bool* aAudio) + bool* aAudio, bool *aScreenShare, + bool* aWindowShare) { // We need to return the union of all streams in all innerwindows that // correspond to that outerwindow. // Iterate the docshell tree to find all the child windows, find // all the listeners for each one, get the booleans, and merge the // results. nsCOMPtr<nsPIDOMWindow> piWin = do_QueryInterface(aWindow); @@ -1986,37 +1997,37 @@ MediaManager::MediaCaptureWindowStateInt nsRefPtr<GetUserMediaCallbackMediaStreamListener> listener = listeners->ElementAt(i); if (listener->CapturingVideo()) { *aVideo = true; } if (listener->CapturingAudio()) { *aAudio = true; } - if (*aAudio && *aVideo) { - return NS_OK; // no need to continue iterating + if (listener->CapturingScreen()) { + *aScreenShare = true; + } + if (listener->CapturingWindow()) { + *aWindowShare = true; } } } } // iterate any children of *this* window (iframes, etc) nsCOMPtr<nsIDocShell> docShell = piWin->GetDocShell(); if (docShell) { int32_t i, count; docShell->GetChildCount(&count); for (i = 0; i < count; ++i) { nsCOMPtr<nsIDocShellTreeItem> item; docShell->GetChildAt(i, getter_AddRefs(item)); nsCOMPtr<nsPIDOMWindow> win = item ? item->GetWindow() : nullptr; - MediaCaptureWindowStateInternal(win, aVideo, aAudio); - if (*aAudio && *aVideo) { - return NS_OK; // no need to continue iterating - } + MediaCaptureWindowStateInternal(win, aVideo, aAudio, aScreenShare, aWindowShare); } } } return NS_OK; } void MediaManager::StopMediaStreams()
--- a/dom/media/MediaManager.h +++ b/dom/media/MediaManager.h @@ -100,26 +100,39 @@ public: } // mVideo/AudioSource are set by Activate(), so we assume they're capturing // if set and represent a real capture device. bool CapturingVideo() { NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); return mVideoSource && !mStopped && + mVideoSource->GetMediaSource() == MediaSourceType::Camera && (!mVideoSource->IsFake() || Preferences::GetBool("media.navigator.permission.fake")); } bool CapturingAudio() { NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); return mAudioSource && !mStopped && (!mAudioSource->IsFake() || Preferences::GetBool("media.navigator.permission.fake")); } + bool CapturingScreen() + { + NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); + return mVideoSource && !mStopped && + mVideoSource->GetMediaSource() == MediaSourceType::Screen; + } + bool CapturingWindow() + { + NS_ASSERTION(NS_IsMainThread(), "Only call on main thread"); + return mVideoSource && !mStopped && + mVideoSource->GetMediaSource() == MediaSourceType::Window; + } void SetStopped() { mStopped = true; } // implement in .cpp to avoid circular dependency with MediaOperationRunnable // Can be invoked from EITHER MainThread or MSG thread @@ -481,17 +494,17 @@ public: protected: virtual ~MediaDevice() {} MediaDevice(MediaEngineSource* aSource); nsString mName; nsString mID; bool mHasFacingMode; dom::VideoFacingModeEnum mFacingMode; - dom::MediaSourceEnum mMediaSource; + MediaSourceType mMediaSource; nsRefPtr<MediaEngineSource> mSource; }; class VideoDevice : public MediaDevice { public: VideoDevice(MediaEngineVideoSource* aSource); NS_IMETHOD GetType(nsAString& aType); @@ -574,17 +587,18 @@ private: void GetPrefs(nsIPrefBranch *aBranch, const char *aData); // Make private because we want only one instance of this class MediaManager(); ~MediaManager() {} nsresult MediaCaptureWindowStateInternal(nsIDOMWindow* aWindow, bool* aVideo, - bool* aAudio); + bool* aAudio, 
bool *aScreenShare, + bool* aWindowShare); void StopMediaStreams(); // ONLY access from MainThread so we don't need to lock WindowTable mActiveWindows; nsRefPtrHashtable<nsStringHashKey, GetUserMediaRunnable> mActiveCallbacks; nsClassHashtable<nsUint64HashKey, nsTArray<nsString>> mCallIds; // Always exists
--- a/dom/media/nsIMediaManager.idl +++ b/dom/media/nsIMediaManager.idl @@ -7,17 +7,18 @@ interface nsISupportsArray; interface nsIDOMWindow; %{C++ #define NS_MEDIAMANAGERSERVICE_CID {0xabc622ea, 0x9655, 0x4123, {0x80, 0xd9, 0x22, 0x62, 0x1b, 0xdd, 0x54, 0x65}} #define MEDIAMANAGERSERVICE_CONTRACTID "@mozilla.org/mediaManagerService;1" %} -[scriptable, builtinclass, uuid(2efff6ab-0e3e-4cc4-8f9b-4aaca59a1140)] +[scriptable, builtinclass, uuid(f431b523-4536-4ba7-a2c1-7e1bf670d32a)] interface nsIMediaManagerService : nsISupports { /* return a array of inner windows that have active captures */ readonly attribute nsISupportsArray activeMediaCaptureWindows; /* Get the capture state for the given window and all descendant windows (iframes, etc) */ - void mediaCaptureWindowState(in nsIDOMWindow aWindow, out boolean aVideo, out boolean aAudio); + void mediaCaptureWindowState(in nsIDOMWindow aWindow, out boolean aVideo, out boolean aAudio, + [optional] out boolean aScreenShare, [optional] out boolean aWindowShare); };