Bug 1330360 - Create new MSGs for each nsPIDOMWindow. r=jesup
author Paul Adenot <paul@paul.cx>
Fri, 23 Jun 2017 16:18:34 -0700
changeset 366888 5de53323f3ad17102b9428fc3c06066055cabc73
parent 366887 1c4d113ff7fb3f2a65feb5d5be66a0e1b3df88e8
child 366889 e0c4f2ca44b4292881a56fab6bf584bf7d966299
push id 32109
push user cbook@mozilla.com
push date Fri, 30 Jun 2017 11:00:05 +0000
treeherder mozilla-central@d536973fe668
reviewers jesup
bugs 1330360
milestone 56.0a1
Bug 1330360 - Create new MSGs for each nsPIDOMWindow. r=jesup MozReview-Commit-ID: 5m1MGcLmT7J
dom/html/HTMLMediaElement.cpp
dom/media/CanvasCaptureMediaStream.cpp
dom/media/DOMMediaStream.cpp
dom/media/MediaManager.cpp
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraph.h
dom/media/webaudio/AudioDestinationNode.cpp
dom/media/webspeech/synth/nsSpeechTask.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
media/webrtc/signaling/test/FakeMediaStreams.h
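
The substance of the patch is in MediaStreamGraph::GetInstance: the global graph table is no longer keyed on the AudioChannel alone but on a hash of the AudioChannel and the nsPIDOMWindowInner the caller passes in, so each window (roughly, each document) gets its own MediaStreamGraph. The standalone sketch below illustrates that keying scheme with plain standard-library types; Graph, GraphKey and GetOrCreateGraph are illustrative stand-ins, not the Gecko classes, and the real code folds channel and window into a single uint32_t key with AddToHash rather than using a pair key.

// Minimal sketch of a per-(channel, window) graph registry, assuming a
// std::unordered_map keyed on the (channel, window) pair rather than Gecko's
// nsDataHashtable keyed on a single hashed uint32_t.
#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>
#include <memory>
#include <unordered_map>
#include <utility>

enum class AudioChannel : uint32_t { Normal, Content };

struct Graph {
  AudioChannel mChannel;
  const void* mWindow;  // stands in for nsPIDOMWindowInner*
};

// The key is the channel plus the identity of the owning window. A null
// window models the "global" graph used by callers that have no window
// (speech synthesis in this patch).
using GraphKey = std::pair<uint32_t, const void*>;

struct GraphKeyHash {
  std::size_t operator()(const GraphKey& aKey) const {
    // Combine the two hashes, much like AddToHash does in the real code.
    return std::hash<uint32_t>{}(aKey.first) ^
           (std::hash<const void*>{}(aKey.second) * 0x9e3779b9u);
  }
};

static std::unordered_map<GraphKey, std::unique_ptr<Graph>, GraphKeyHash> gGraphs;

// Return the existing graph for (channel, window), creating one on first use.
Graph* GetOrCreateGraph(AudioChannel aChannel, const void* aWindow) {
  GraphKey key{static_cast<uint32_t>(aChannel), aWindow};
  auto& slot = gGraphs[key];
  if (!slot) {
    slot = std::make_unique<Graph>(Graph{aChannel, aWindow});
  }
  return slot.get();
}

int main() {
  int windowA = 0, windowB = 0;  // dummy objects standing in for two windows
  Graph* a1 = GetOrCreateGraph(AudioChannel::Normal, &windowA);
  Graph* a2 = GetOrCreateGraph(AudioChannel::Normal, &windowA);
  Graph* b = GetOrCreateGraph(AudioChannel::Normal, &windowB);
  std::cout << (a1 == a2) << " "    // 1: same window reuses its graph
            << (a1 != b) << "\n";   // 1: a different window gets its own graph
  return 0;
}

Calling GetOrCreateGraph twice with the same window returns the same graph, while a second window gets a graph of its own, which is the observable change callers of MediaStreamGraph::GetInstance see after this patch.
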
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -3550,17 +3550,17 @@ HTMLMediaElement::MozCaptureStream(Error
   }
 
   if (!CanBeCaptured(false)) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
   MediaStreamGraph* graph =
-    MediaStreamGraph::GetInstance(graphDriverType, mAudioChannel);
+    MediaStreamGraph::GetInstance(graphDriverType, mAudioChannel, window);
 
   RefPtr<DOMMediaStream> stream =
     CaptureStreamInternal(false, false, graph);
   if (!stream) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
@@ -3581,17 +3581,17 @@ HTMLMediaElement::MozCaptureStreamUntilE
   }
 
   if (!CanBeCaptured(false)) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
   MediaStreamGraph* graph =
-    MediaStreamGraph::GetInstance(graphDriverType, mAudioChannel);
+    MediaStreamGraph::GetInstance(graphDriverType, mAudioChannel, window);
 
   RefPtr<DOMMediaStream> stream =
     CaptureStreamInternal(true, false, graph);
   if (!stream) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
@@ -7207,17 +7207,17 @@ HTMLMediaElement::AudioCaptureStreamChan
     nsCOMPtr<nsPIDOMWindowInner> window = OwnerDoc()->GetInnerWindow();
     if (!OwnerDoc()->GetInnerWindow()) {
       return;
     }
 
     uint64_t id = window->WindowID();
     MediaStreamGraph* msg =
       MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
-                                    mAudioChannel);
+                                    mAudioChannel, window);
 
     if (GetSrcMediaStream()) {
       mCaptureStreamPort = msg->ConnectToCaptureStream(id, GetSrcMediaStream());
     } else {
       RefPtr<DOMMediaStream> stream =
         CaptureStreamInternal(false, false, msg);
       mCaptureStreamPort = msg->ConnectToCaptureStream(id, stream->GetPlaybackStream());
     }
--- a/dom/media/CanvasCaptureMediaStream.cpp
+++ b/dom/media/CanvasCaptureMediaStream.cpp
@@ -275,17 +275,18 @@ CanvasCaptureMediaStream::Init(const dom
 
 already_AddRefed<CanvasCaptureMediaStream>
 CanvasCaptureMediaStream::CreateSourceStream(nsPIDOMWindowInner* aWindow,
                                              HTMLCanvasElement* aCanvas)
 {
   RefPtr<CanvasCaptureMediaStream> stream = new CanvasCaptureMediaStream(aWindow, aCanvas);
   MediaStreamGraph* graph =
     MediaStreamGraph::GetInstance(MediaStreamGraph::SYSTEM_THREAD_DRIVER,
-                                  AudioChannel::Normal);
+                                  AudioChannel::Normal,
+                                  aWindow);
   stream->InitSourceStream(graph);
   return stream.forget();
 }
 
 FrameCaptureListener*
 CanvasCaptureMediaStream::FrameCaptureListener()
 {
   return mOutputStreamDriver;
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -565,17 +565,17 @@ DOMMediaStream::Constructor(const Global
     }
     newStream->AddTrack(track);
   }
 
   if (!newStream->GetPlaybackStream()) {
     MOZ_ASSERT(aTracks.IsEmpty());
     MediaStreamGraph* graph =
       MediaStreamGraph::GetInstance(MediaStreamGraph::SYSTEM_THREAD_DRIVER,
-                                    AudioChannel::Normal);
+                                    AudioChannel::Normal, ownerWindow);
     newStream->InitPlaybackStreamCommon(graph);
   }
 
   return newStream.forget();
 }
 
 double
 DOMMediaStream::CurrentTime()
@@ -1571,17 +1571,17 @@ DOMHwMediaStream::~DOMHwMediaStream()
 already_AddRefed<DOMHwMediaStream>
 DOMHwMediaStream::CreateHwStream(nsPIDOMWindowInner* aWindow,
                                  OverlayImage* aImage)
 {
   RefPtr<DOMHwMediaStream> stream = new DOMHwMediaStream(aWindow);
 
   MediaStreamGraph* graph =
     MediaStreamGraph::GetInstance(MediaStreamGraph::SYSTEM_THREAD_DRIVER,
-                                  AudioChannel::Normal);
+                                  AudioChannel::Normal, aWindow);
   stream->InitSourceStream(graph);
   stream->Init(stream->GetInputStream(), aImage);
 
   return stream.forget();
 }
 
 void
 DOMHwMediaStream::Init(MediaStream* stream, OverlayImage* aImage)
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -1063,17 +1063,17 @@ public:
       return NS_OK;
     }
 
     MediaStreamGraph::GraphDriverType graphDriverType =
       mAudioDevice ? MediaStreamGraph::AUDIO_THREAD_DRIVER
                    : MediaStreamGraph::SYSTEM_THREAD_DRIVER;
     MediaStreamGraph* msg =
       MediaStreamGraph::GetInstance(graphDriverType,
-                                    dom::AudioChannel::Normal);
+                                    dom::AudioChannel::Normal, window);
 
     RefPtr<DOMMediaStream> domStream;
     RefPtr<SourceMediaStream> stream;
    // AudioCapture is a special case here, in the sense that we're not really
    // using the audio source and the SourceMediaStream, which act as
    // placeholders. We re-route a number of streams internally in the MSG and
    // mix them down instead.
     if (mAudioDevice &&
@@ -3648,17 +3648,17 @@ SourceListener::StopSharing()
   if (mAudioDevice &&
       mAudioDevice->GetMediaSource() == MediaSourceEnum::AudioCapture) {
     uint64_t windowID = mWindowListener->WindowID();
     nsCOMPtr<nsPIDOMWindowInner> window = nsGlobalWindow::GetInnerWindowWithId(windowID)->AsInner();
     MOZ_RELEASE_ASSERT(window);
     window->SetAudioCapture(false);
     MediaStreamGraph* graph =
       MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
-                                    dom::AudioChannel::Normal);
+                                    dom::AudioChannel::Normal, window);
     graph->UnregisterCaptureStreamForWindow(windowID);
     mStream->Destroy();
   }
 }
 
 SourceMediaStream*
 SourceListener::GetSourceStream()
 {
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -1701,20 +1701,23 @@ MediaStreamGraphImpl::RunInStableState(b
         // to shut down, and we don't want to do that in a stable state handler.
         mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
         LOG(LogLevel::Debug,
             ("Sending MediaStreamGraphShutDownRunnable %p", this));
        nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
         NS_DispatchToMainThread(event.forget());
 
         LOG(LogLevel::Debug, ("Disconnecting MediaStreamGraph %p", this));
-        MediaStreamGraphImpl* graph;
-        if (gGraphs.Get(uint32_t(mAudioChannel), &graph) && graph == this) {
-          // null out gGraph if that's the graph being shut down
-          gGraphs.Remove(uint32_t(mAudioChannel));
+
+        // Find the graph in the hash table and remove it.
+        for (auto iter = gGraphs.Iter(); !iter.Done(); iter.Next()) {
+          if (iter.UserData() == this) {
+            iter.Remove();
+            break;
+          }
         }
       }
     } else {
       if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
         MessageBlock* block = mBackMessageQueue.AppendElement();
         block->mMessages.SwapElements(mCurrentTaskMessageQueue);
         EnsureNextIterationLocked();
       }
@@ -1856,19 +1859,22 @@ MediaStreamGraphImpl::AppendMessage(Uniq
 #endif
     aMessage->RunDuringShutdown();
 #ifdef DEBUG
     mCanRunMessagesSynchronously = true;
 #endif
     if (IsEmpty() &&
         mLifecycleState >= LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION) {
 
-      MediaStreamGraphImpl* graph;
-      if (gGraphs.Get(uint32_t(mAudioChannel), &graph) && graph == this) {
-        gGraphs.Remove(uint32_t(mAudioChannel));
+      // Find the graph in the hash table and remove it.
+      for (auto iter = gGraphs.Iter(); !iter.Done(); iter.Next()) {
+        if (iter.UserData() == this) {
+          iter.Remove();
+          break;
+        }
       }
 
       Destroy();
     }
     return;
   }
 
   mCurrentTaskMessageQueue.AppendElement(Move(aMessage));
@@ -3464,26 +3470,46 @@ MediaStreamGraphImpl::Destroy()
 {
   // First unregister from memory reporting.
   UnregisterWeakMemoryReporter(this);
 
   // Clear the self reference which will destroy this instance.
   mSelfRef = nullptr;
 }
 
+static uint32_t
+ChannelAndWindowToHash(dom::AudioChannel aChannel,
+                       nsPIDOMWindowInner* aWindow)
+{
+  uint32_t hashkey = 0;
+
+  hashkey = AddToHash(hashkey, static_cast<uint32_t>(aChannel));
+  hashkey = AddToHash(hashkey, aWindow);
+
+  return hashkey;
+}
+
 MediaStreamGraph*
 MediaStreamGraph::GetInstance(MediaStreamGraph::GraphDriverType aGraphDriverRequested,
-                              dom::AudioChannel aChannel)
+                              dom::AudioChannel aChannel,
+                              nsPIDOMWindowInner* aWindow)
 {
   NS_ASSERTION(NS_IsMainThread(), "Main thread only");
 
   uint32_t channel = static_cast<uint32_t>(aChannel);
   MediaStreamGraphImpl* graph = nullptr;
 
-  if (!gGraphs.Get(channel, &graph)) {
+  // We hash the AudioChannel and the nsPIDOMWindowInner together to form a key
+  // into the global MediaStreamGraph hashtable. Effectively, this means there
+  // is a graph per document and AudioChannel.
+
+  uint32_t hashkey = ChannelAndWindowToHash(aChannel, aWindow);
+
+  if (!gGraphs.Get(hashkey, &graph)) {
     if (!gMediaStreamGraphShutdownBlocker) {
 
       class Blocker : public media::ShutdownBlocker
       {
       public:
         Blocker()
         : media::ShutdownBlocker(NS_LITERAL_STRING(
             "MediaStreamGraph shutdown: blocking on msg thread"))
@@ -3513,22 +3539,21 @@ MediaStreamGraph::GetInstance(MediaStrea
                      NS_LITERAL_STRING("MediaStreamGraph shutdown"));
       MOZ_RELEASE_ASSERT(NS_SUCCEEDED(rv));
     }
 
     graph = new MediaStreamGraphImpl(aGraphDriverRequested,
                                      CubebUtils::PreferredSampleRate(),
                                      aChannel);
 
-    gGraphs.Put(channel, graph);
+    gGraphs.Put(hashkey, graph);
 
     LOG(LogLevel::Debug,
-        ("Starting up MediaStreamGraph %p for channel %s",
-         graph,
-         AudioChannelValues::strings[channel].value));
+        ("Starting up MediaStreamGraph %p for channel %s and window %p",
+         graph, AudioChannelValues::strings[channel].value, aWindow));
   }
 
   return graph;
 }
 
 MediaStreamGraph*
 MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate)
 {
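
One consequence of the new key shows up in the two teardown paths above (RunInStableState and AppendMessage): a graph only remembers its mAudioChannel, not the window it was created for, so it can no longer recompute its own key and call gGraphs.Remove(key). Instead it walks the table and removes the entry whose value is the graph being shut down. A self-contained standard-library equivalent of that remove-by-value loop (Graph and gGraphs here are local stand-ins, not the Gecko types):

#include <cstdint>
#include <memory>
#include <unordered_map>

struct Graph {};
static std::unordered_map<uint32_t, std::unique_ptr<Graph>> gGraphs;

// Remove-by-value: erase whichever entry maps to this graph. Mirrors the
// gGraphs.Iter()/iter.Remove() loop in the patch; erasing through the
// iterator is safe because iteration stops immediately afterwards.
void UnregisterGraph(Graph* aGraph) {
  for (auto it = gGraphs.begin(); it != gGraphs.end(); ++it) {
    if (it->second.get() == aGraph) {
      gGraphs.erase(it);
      break;
    }
  }
}
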
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -17,16 +17,17 @@
 #include "mozilla/dom/AudioChannelBinding.h"
 #include "nsAutoPtr.h"
 #include "nsAutoRef.h"
 #include "nsIRunnable.h"
 #include "nsTArray.h"
 #include <speex/speex_resampler.h>
 
 class nsIRunnable;
+class nsPIDOMWindowInner;
 
 template <>
 class nsAutoRefTraits<SpeexResamplerState> : public nsPointerRefTraits<SpeexResamplerState>
 {
   public:
   static void Release(SpeexResamplerState* aState) { speex_resampler_destroy(aState); }
 };
 
@@ -1268,17 +1269,18 @@ public:
     AUDIO_THREAD_DRIVER,
     SYSTEM_THREAD_DRIVER,
     OFFLINE_THREAD_DRIVER
   };
   static const uint32_t AUDIO_CALLBACK_DRIVER_SHUTDOWN_TIMEOUT = 20*1000;
 
   // Main thread only
   static MediaStreamGraph* GetInstance(GraphDriverType aGraphDriverRequested,
-                                       dom::AudioChannel aChannel);
+                                       dom::AudioChannel aChannel,
+                                       nsPIDOMWindowInner* aWindow);
   static MediaStreamGraph* CreateNonRealtimeInstance(TrackRate aSampleRate);
   // Idempotent
   static void DestroyNonRealtimeInstance(MediaStreamGraph* aGraph);
 
   virtual nsresult OpenAudioInput(int aID,
                                   AudioDataListener *aListener) {
     return NS_ERROR_FAILURE;
   }
--- a/dom/media/webaudio/AudioDestinationNode.cpp
+++ b/dom/media/webaudio/AudioDestinationNode.cpp
@@ -330,19 +330,20 @@ AudioDestinationNode::AudioDestinationNo
               ChannelCountMode::Explicit, ChannelInterpretation::Speakers)
   , mFramesToProduce(aLength)
   , mAudioChannel(AudioChannel::Normal)
   , mIsOffline(aIsOffline)
   , mAudioChannelSuspended(false)
   , mCaptured(false)
   , mAudible(AudioChannelService::AudibleState::eAudible)
 {
+  nsPIDOMWindowInner* window = aContext->GetParentObject();
   MediaStreamGraph* graph = aIsOffline ?
                             MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate) :
-                            MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER, aChannel);
+                            MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER, aChannel, window);
   AudioNodeEngine* engine = aIsOffline ?
                             new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                              aLength, aSampleRate) :
                             static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));
 
   AudioNodeStream::Flags flags =
     AudioNodeStream::NEED_MAIN_THREAD_CURRENT_TIME |
     AudioNodeStream::NEED_MAIN_THREAD_FINISHED |
--- a/dom/media/webspeech/synth/nsSpeechTask.cpp
+++ b/dom/media/webspeech/synth/nsSpeechTask.cpp
@@ -167,18 +167,20 @@ nsSpeechTask::~nsSpeechTask()
     mPort->Destroy();
     mPort = nullptr;
   }
 }
 
 void
 nsSpeechTask::InitDirectAudio()
 {
+  // Passing nullptr as the final argument means this graph is not tied to a
+  // window: it is a global MSG.
   mStream = MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
-                                          AudioChannel::Normal)->
+                                          AudioChannel::Normal, nullptr)->
     CreateSourceStream(AbstractThread::MainThread() /* Non DocGroup-version for the task in parent. */);
   mIndirectAudio = false;
   mInited = true;
 }
 
 void
 nsSpeechTask::InitIndirectAudio()
 {
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
@@ -375,17 +375,17 @@ PeerConnectionImpl::~PeerConnectionImpl(
   // to release off a timer (and XPCOM Shutdown) to avoid churn
 }
 
 already_AddRefed<DOMMediaStream>
 PeerConnectionImpl::MakeMediaStream()
 {
   MediaStreamGraph* graph =
     MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
-                                  AudioChannel::Normal);
+                                  AudioChannel::Normal, GetWindow());
 
   RefPtr<DOMMediaStream> stream =
     DOMMediaStream::CreateSourceStreamAsInput(GetWindow(), graph);
 
   CSFLogDebug(logTag, "Created media stream %p, inner: %p", stream.get(), stream->GetInputStream());
 
   return stream.forget();
 }
--- a/media/webrtc/signaling/test/FakeMediaStreams.h
+++ b/media/webrtc/signaling/test/FakeMediaStreams.h
@@ -58,17 +58,19 @@ class MediaStreamGraph {
 public:
   // Keep this in sync with the enum in MediaStreamGraph.h
   enum GraphDriverType {
     AUDIO_THREAD_DRIVER,
     SYSTEM_THREAD_DRIVER,
     OFFLINE_THREAD_DRIVER
   };
   static MediaStreamGraph* GetInstance(GraphDriverType aDriverType,
-                                       uint32_t aType) {
+                                       uint32_t aType,
+                                       nsPIDOMWindowInner* aWindow) {
+    // We don't care about the AudioChannel type or the window here.
     if (gGraph) {
       return gGraph;
     }
     gGraph = new MediaStreamGraph();
     return gGraph;
   }
   uint32_t GraphRate() { return 16000; }
 };
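
The test shim in FakeMediaStreams.h only grows the extra parameter so that the signaling unit tests keep compiling; it deliberately ignores both the channel and the window and keeps returning the single static gGraph. If a test ever needed to observe the per-window behaviour, a window-keyed stub along the following lines would do; this is a sketch only, and GetTestInstance and FakeGraph are made-up names, not part of the patch.

#include <map>
#include <memory>

class nsPIDOMWindowInner;  // forward declaration, as in the real shim

struct FakeGraph {};

// Hypothetical window-aware test stub: one fake graph per window pointer,
// instead of the single static gGraph the shim actually uses.
static FakeGraph* GetTestInstance(nsPIDOMWindowInner* aWindow) {
  static std::map<nsPIDOMWindowInner*, std::unique_ptr<FakeGraph>> sGraphs;
  auto& slot = sGraphs[aWindow];
  if (!slot) {
    slot = std::make_unique<FakeGraph>();
  }
  return slot.get();
}
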