Bug 1073615 - One MediaStreamGraph singleton per audioChannel, r=roc
author  Andrea Marchesini <amarchesini@mozilla.com>
Mon, 17 Nov 2014 16:07:55 +0000
changeset 216128 acc3209d766a8d0314c55f49773312ad7e57ddde
parent 216127 6a1a0a357f116c1578d7455bf377a662d42449fa
child 216129 03cd5b31ae494ca8d8f55858c6bbbcd64232fe31
push id  27842
push user  cbook@mozilla.com
push date  Tue, 18 Nov 2014 16:25:55 +0000
treeherder  mozilla-central@084441e904d1
reviewers  roc
bugs  1073615
milestone  36.0a1
Bug 1073615 - One MediaStreamGraph singleton per audioChannel, r=roc
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraphImpl.h
dom/media/webaudio/AudioContext.cpp
dom/media/webaudio/AudioContext.h
dom/media/webaudio/test/test_mozaudiochannel.html
dom/webidl/AudioContext.webidl
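
In outline, the patch replaces the single gGraph pointer with a hashtable keyed by the audio channel, so MediaStreamGraph::GetInstance() lazily creates and caches one realtime graph per dom::AudioChannel. A simplified sketch of that lookup, mirroring the names used in the diff below (shutdown-observer registration, logging, and preference checks omitted):

// Sketch only: one realtime MediaStreamGraph per audio channel, keyed by the
// integer value of dom::AudioChannel.
static nsDataHashtable<nsUint32HashKey, MediaStreamGraphImpl*> gGraphs;

MediaStreamGraph*
MediaStreamGraph::GetInstance(DOMMediaStream::TrackTypeHints aHint,
                              dom::AudioChannel aChannel)
{
  uint32_t channel = static_cast<uint32_t>(aChannel);
  MediaStreamGraphImpl* graph = nullptr;
  if (!gGraphs.Get(channel, &graph)) {
    // First request for this channel: create the graph and cache it.
    graph = new MediaStreamGraphImpl(true, CubebUtils::PreferredSampleRate(),
                                     aHint, aChannel);
    gGraphs.Put(channel, graph);
  }
  return graph;
}
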
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -60,17 +60,17 @@ PRLogModuleInfo* gMediaStreamGraphLog;
 #  endif
 #else
 #  define LIFECYCLE_LOG(...)
 #endif
 
 /**
  * The singleton graph instance.
  */
-static MediaStreamGraphImpl* gGraph;
+static nsDataHashtable<nsUint32HashKey, MediaStreamGraphImpl*> gGraphs;
 
 MediaStreamGraphImpl::~MediaStreamGraphImpl()
 {
   NS_ASSERTION(IsEmpty(),
                "All streams should have been destroyed by messages from the main thread");
   STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
   LIFECYCLE_LOG("MediaStreamGraphImpl::~MediaStreamGraphImpl\n");
 }
@@ -1631,19 +1631,20 @@ MediaStreamGraphImpl::RunInStableState(b
         // synchronously because it spins the event loop waiting for threads
         // to shut down, and we don't want to do that in a stable state handler.
         mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
         LIFECYCLE_LOG("Sending MediaStreamGraphShutDownRunnable %p", this);
         nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this );
         NS_DispatchToMainThread(event);
 
         LIFECYCLE_LOG("Disconnecting MediaStreamGraph %p", this);
-        if (this == gGraph) {
+        MediaStreamGraphImpl* graph;
+        if (gGraphs.Get(mAudioChannel, &graph) && graph == this) {
-          // null out gGraph if that's the graph being shut down
-          gGraph = nullptr;
+          // Remove this graph from gGraphs, since it is being shut down.
+          gGraphs.Remove(mAudioChannel);
         }
       }
     } else {
       if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
         MessageBlock* block = mBackMessageQueue.AppendElement();
         block->mMessages.SwapElements(mCurrentTaskMessageQueue);
         block->mGraphUpdateIndex = mNextGraphUpdateIndex;
         ++mNextGraphUpdateIndex;
@@ -1784,19 +1785,22 @@ MediaStreamGraphImpl::AppendMessage(Cont
 #endif
     aMessage->RunDuringShutdown();
 #ifdef DEBUG
     mCanRunMessagesSynchronously = true;
 #endif
     delete aMessage;
     if (IsEmpty() &&
         mLifecycleState >= LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION) {
-      if (gGraph == this) {
-        gGraph = nullptr;
+
+      MediaStreamGraphImpl* graph;
+      if (gGraphs.Get(mAudioChannel, &graph) && graph == this) {
+        gGraphs.Remove(mAudioChannel);
       }
+
       Destroy();
     }
     return;
   }
 
   mCurrentTaskMessageQueue.AppendElement(aMessage);
   EnsureRunInStableState();
 }
@@ -2736,16 +2740,17 @@ MediaStreamGraphImpl::MediaStreamGraphIm
 #endif
   , mMemoryReportMonitor("MSGIMemory")
   , mSelfRef(MOZ_THIS_IN_INITIALIZER_LIST())
   , mAudioStreamSizes()
   , mNeedsMemoryReport(false)
 #ifdef DEBUG
   , mCanRunMessagesSynchronously(false)
 #endif
+  , mAudioChannel(static_cast<uint32_t>(aChannel))
 {
 #ifdef PR_LOGGING
   if (!gMediaStreamGraphLog) {
     gMediaStreamGraphLog = PR_NewLogModule("MediaStreamGraph");
   }
 #endif
 
   if (mRealtime) {
@@ -2774,50 +2779,65 @@ MediaStreamGraphImpl::Destroy()
   // Clear the self reference which will destroy this instance.
   mSelfRef = nullptr;
 }
 
 NS_IMPL_ISUPPORTS(MediaStreamGraphShutdownObserver, nsIObserver)
 
 static bool gShutdownObserverRegistered = false;
 
+namespace {
+
+PLDHashOperator
+ForceShutdownEnumerator(const uint32_t& /* aAudioChannel */,
+                        MediaStreamGraphImpl* aGraph,
+                        void* /* aUnused */)
+{
+  aGraph->ForceShutDown();
+  return PL_DHASH_NEXT;
+}
+
+} // anonymous namespace
+
 NS_IMETHODIMP
 MediaStreamGraphShutdownObserver::Observe(nsISupports *aSubject,
                                           const char *aTopic,
                                           const char16_t *aData)
 {
   if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
-    if (gGraph) {
-      gGraph->ForceShutDown();
-    }
+    gGraphs.EnumerateRead(ForceShutdownEnumerator, nullptr);
     nsContentUtils::UnregisterShutdownObserver(this);
     gShutdownObserverRegistered = false;
   }
   return NS_OK;
 }
 
 MediaStreamGraph*
 MediaStreamGraph::GetInstance(DOMMediaStream::TrackTypeHints aHint, dom::AudioChannel aChannel)
 {
   NS_ASSERTION(NS_IsMainThread(), "Main thread only");
 
-  if (!gGraph) {
+  uint32_t channel = static_cast<uint32_t>(aChannel);
+  MediaStreamGraphImpl* graph = nullptr;
+
+  if (!gGraphs.Get(channel, &graph)) {
     if (!gShutdownObserverRegistered) {
       gShutdownObserverRegistered = true;
       nsContentUtils::RegisterShutdownObserver(new MediaStreamGraphShutdownObserver());
     }
 
     CubebUtils::InitPreferredSampleRate();
 
-    gGraph = new MediaStreamGraphImpl(true, CubebUtils::PreferredSampleRate(), aHint, aChannel);
+    graph = new MediaStreamGraphImpl(true, CubebUtils::PreferredSampleRate(), aHint, aChannel);
+    gGraphs.Put(channel, graph);
 
-    STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", gGraph));
+    STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", graph));
   }
 
-  return gGraph;
+  return graph;
 }
 
 MediaStreamGraph*
 MediaStreamGraph::CreateNonRealtimeInstance(TrackRate aSampleRate)
 {
   NS_ASSERTION(NS_IsMainThread(), "Main thread only");
 
   MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(false, aSampleRate);
@@ -2990,17 +3010,20 @@ MediaStreamGraph::CreateAudioNodeStream(
   }
   graph->AppendMessage(new CreateMessage(stream));
   return stream;
 }
 
 bool
 MediaStreamGraph::IsNonRealtime() const
 {
-  return this != gGraph;
+  const MediaStreamGraphImpl* impl = static_cast<const MediaStreamGraphImpl*>(this);
+  MediaStreamGraphImpl* graph;
+
+  return !gGraphs.Get(impl->AudioChannel(), &graph) || graph != impl;
 }
 
 void
 MediaStreamGraph::StartNonRealtimeProcessing(TrackRate aRate, uint32_t aTicksToProcess)
 {
   NS_ASSERTION(NS_IsMainThread(), "main thread only");
 
   MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -656,16 +656,18 @@ public:
    * Hold a ref to the Latency logger
    */
   nsRefPtr<AsyncLatencyLogger> mLatencyLog;
   AudioMixer mMixer;
 #ifdef MOZ_WEBRTC
   nsRefPtr<AudioOutputObserver> mFarendObserverRef;
 #endif
 
+  uint32_t AudioChannel() const { return mAudioChannel; }
+
 private:
   virtual ~MediaStreamGraphImpl();
 
   MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf)
 
   /**
    * Used to signal that a memory report has been requested.
    */
@@ -689,13 +691,16 @@ private:
 
 #ifdef DEBUG
   /**
    * Used to assert when AppendMessage() runs ControlMessages synchronously.
    */
   bool mCanRunMessagesSynchronously;
 #endif
 
+  // We use uint32_t instead of AudioChannel because this is only used as the
+  // key for the gGraphs hashtable.
+  uint32_t mAudioChannel;
 };
 
 }
 
 #endif /* MEDIASTREAMGRAPHIMPL_H_ */
--- a/dom/media/webaudio/AudioContext.cpp
+++ b/dom/media/webaudio/AudioContext.cpp
@@ -664,22 +664,16 @@ AudioContext::Unmute() const
 }
 
 AudioChannel
 AudioContext::MozAudioChannelType() const
 {
   return mDestination->MozAudioChannelType();
 }
 
-void
-AudioContext::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
-{
-  mDestination->SetMozAudioChannelType(aValue, aRv);
-}
-
 AudioChannel
 AudioContext::TestAudioChannelInAudioNodeStream()
 {
   MediaStream* stream = mDestination->Stream();
   MOZ_ASSERT(stream);
 
   return stream->AudioChannelType();
 }
--- a/dom/media/webaudio/AudioContext.h
+++ b/dom/media/webaudio/AudioContext.h
@@ -219,17 +219,16 @@ public:
   uint32_t MaxChannelCount() const;
 
   void Mute() const;
   void Unmute() const;
 
   JSObject* GetGlobalJSObject() const;
 
   AudioChannel MozAudioChannelType() const;
-  void SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv);
 
   AudioChannel TestAudioChannelInAudioNodeStream();
 
   void UpdateNodeCount(int32_t aDelta);
 
   double DOMTimeToStreamTime(double aTime) const
   {
     return aTime - ExtraCurrentTime();
--- a/dom/media/webaudio/test/test_mozaudiochannel.html
+++ b/dom/media/webaudio/test/test_mozaudiochannel.html
@@ -13,37 +13,33 @@
 
 function test_basic() {
   var ac = new AudioContext();
   ok(ac, "AudioContext created");
 
   // Default
   is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
 
-  // random wrong channel
-  ac.mozAudioChannelType = "foo";
+  // Unpermitted channels
+  ac = new AudioContext("content");
   is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
 
-  // Unpermitted channels
-  ac.mozAudioChannelType = "content";
-  is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
-
-  ac.mozAudioChannelType = "notification";
+  ac = new AudioContext("notification");
   is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
 
-  ac.mozAudioChannelType = "alarm";
+  ac = new AudioContext("alarm");
   is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
 
-  ac.mozAudioChannelType = "telephony";
+  ac = new AudioContext("telephony");
   is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
 
-  ac.mozAudioChannelType = "ringer";
+  ac = new AudioContext("ringer");
   is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
 
-  ac.mozAudioChannelType = "publicnotification";
+  ac = new AudioContext("publicnotification");
   is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
 
   runTest();
 }
 
 function test_permission(aChannel) {
   var ac = new AudioContext();
   ok(ac, "AudioContext created");
@@ -51,17 +47,17 @@ function test_permission(aChannel) {
   is(ac.mozAudioChannelType, "normal", "Default ac channel == 'normal'");
 
   var channel = SpecialPowers.wrap(ac).testAudioChannelInAudioNodeStream();
   is(channel, "normal", "AudioNodeStream is using the correct default audio channel.");
 
   SpecialPowers.pushPermissions(
     [{ "type": "audio-channel-" + aChannel, "allow": true, "context": document }],
     function() {
-      ac.mozAudioChannelType = aChannel;
+      var ac = new AudioContext(aChannel);
       is(ac.mozAudioChannelType, aChannel, "Default ac channel == '" + aChannel + "'");
 
       var channel = SpecialPowers.wrap(ac).testAudioChannelInAudioNodeStream();
       is(channel, aChannel, "AudioNodeStream is using the correct new audio channel.");
 
       runTest();
     }
   );
@@ -142,13 +138,14 @@ function runTest() {
   }
 
   var test = tests.shift();
   test();
 }
 
 SpecialPowers.pushPrefEnv({"set": [["media.useAudioChannelService", true ]]}, runTest);
 SimpleTest.waitForExplicitFinish();
+SimpleTest.requestLongerTimeout(5);
 
 </script>
 </pre>
 </body>
 </html>
--- a/dom/webidl/AudioContext.webidl
+++ b/dom/webidl/AudioContext.webidl
@@ -73,18 +73,18 @@ interface AudioContext : EventTarget {
     [NewObject, Throws]
     PeriodicWave createPeriodicWave(Float32Array real, Float32Array imag);
 
 };
 
 // Mozilla extensions
 partial interface AudioContext {
   // Read AudioChannel.webidl for more information about this attribute.
-  [Pref="media.useAudioChannelService", SetterThrows]
-  attribute AudioChannel mozAudioChannelType;
+  [Pref="media.useAudioChannelService"]
+  readonly attribute AudioChannel mozAudioChannelType;
 
   // These 2 events are dispatched when the AudioContext object is muted by
  // the AudioChannelService. It's called 'interrupt' because when this event is
  // dispatched on an HTMLMediaElement, the audio stream is paused.
   [Pref="media.useAudioChannelService"]
   attribute EventHandler onmozinterruptbegin;
 
   [Pref="media.useAudioChannelService"]