Bug 1027172 - Part 2: AudioContext should call AudioChannelAgent::StopPlaying() when the destination node doesn't have any input or it's muted. r=ehsan, a=sledru
author: Andrea Marchesini <amarchesini@mozilla.com>
Tue, 24 Jun 2014 22:15:36 -0700
changeset 208796 3ee7e6b1dc88492d8aa4a31053ddf6b2b52e40a3
parent 208795 46c8bd676a5057c23cdd0b2275efff3b7c86a079
child 208797 42b898f4f7aa04843a5e29c2e37960823262aba4
push id: 494
push user: raliiev@mozilla.com
push date: Mon, 25 Aug 2014 18:42:16 +0000
treeherder: mozilla-release@a3cc3e46b571 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: ehsan, sledru
bugs: 1027172
milestone: 32.0a2
Bug 1027172 - Part 2: AudioContext should call AudioChannelAgent::StopPlaying() when the destination node doesn't have any input or it's muted. r=ehsan, a=sledru
content/media/AudioSegment.h
content/media/webaudio/AudioDestinationNode.cpp
content/media/webaudio/AudioDestinationNode.h
content/media/webaudio/test/browser.ini
content/media/webaudio/test/browser_mozAudioChannel.html
content/media/webaudio/test/browser_mozAudioChannel.js
content/media/webaudio/test/browser_mozAudioChannel_muted.html
content/media/webaudio/test/browser_mozAudioChannel_muted.js
--- a/content/media/AudioSegment.h
+++ b/content/media/AudioSegment.h
@@ -126,16 +126,18 @@ struct AudioChunk {
     mBuffer = nullptr;
     mChannelData.Clear();
     mDuration = aDuration;
     mVolume = 1.0f;
     mBufferFormat = AUDIO_FORMAT_SILENCE;
   }
   int ChannelCount() const { return mChannelData.Length(); }
 
+  bool IsMuted() const { return mVolume == 0.0f; }
+
   size_t SizeOfExcludingThisIfUnshared(MallocSizeOf aMallocSizeOf) const
   {
     return SizeOfExcludingThis(aMallocSizeOf, true);
   }
 
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf, bool aUnshared) const
   {
     size_t amount = 0;
--- a/content/media/webaudio/AudioDestinationNode.cpp
+++ b/content/media/webaudio/AudioDestinationNode.cpp
@@ -173,32 +173,72 @@ private:
   InputChannels mInputChannels;
   // An index representing the next offset in mInputChannels to be written to.
   uint32_t mWriteIndex;
   // How many frames the OfflineAudioContext intends to produce.
   uint32_t mLength;
   float mSampleRate;
 };
 
+class InputMutedRunnable : public nsRunnable
+{
+public:
+  InputMutedRunnable(AudioNodeStream* aStream,
+                     bool aInputMuted)
+    : mStream(aStream)
+    , mInputMuted(aInputMuted)
+  {
+  }
+
+  NS_IMETHOD Run()
+  {
+    MOZ_ASSERT(NS_IsMainThread());
+    nsRefPtr<AudioNode> node = mStream->Engine()->NodeMainThread();
+
+    if (node) {
+      nsRefPtr<AudioDestinationNode> destinationNode =
+        static_cast<AudioDestinationNode*>(node.get());
+      destinationNode->InputMuted(mInputMuted);
+    }
+    return NS_OK;
+  }
+
+private:
+  nsRefPtr<AudioNodeStream> mStream;
+  bool mInputMuted;
+};
+
 class DestinationNodeEngine : public AudioNodeEngine
 {
 public:
   explicit DestinationNodeEngine(AudioDestinationNode* aNode)
     : AudioNodeEngine(aNode)
     , mVolume(1.0f)
+    , mLastInputMuted(true)
   {
+    MOZ_ASSERT(aNode);
   }
 
   virtual void ProcessBlock(AudioNodeStream* aStream,
                             const AudioChunk& aInput,
                             AudioChunk* aOutput,
                             bool* aFinished) MOZ_OVERRIDE
   {
     *aOutput = aInput;
     aOutput->mVolume *= mVolume;
+
+    bool newInputMuted = aInput.IsNull() || aInput.IsMuted();
+    if (newInputMuted != mLastInputMuted) {
+      mLastInputMuted = newInputMuted;
+
+      nsRefPtr<InputMutedRunnable> runnable =
+        new InputMutedRunnable(aStream, newInputMuted);
+      aStream->Graph()->
+        DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
+    }
   }
 
   virtual void SetDoubleParameter(uint32_t aIndex, double aParam) MOZ_OVERRIDE
   {
     if (aIndex == VOLUME) {
       mVolume = aParam;
     }
   }
@@ -209,16 +249,17 @@ public:
 
   virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE
   {
     return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
   }
 
 private:
   float mVolume;
+  bool mLastInputMuted;
 };
 
 static bool UseAudioChannelService()
 {
   return Preferences::GetBool("media.useAudioChannelService");
 }
 
 NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioDestinationNode, AudioNode,
@@ -534,22 +575,16 @@ AudioDestinationNode::CreateAudioChannel
                                            this);
 
   nsCOMPtr<nsIDocShell> docshell = do_GetInterface(GetOwner());
   if (docshell) {
     bool isActive = false;
     docshell->GetIsActive(&isActive);
     mAudioChannelAgent->SetVisibilityState(isActive);
   }
-
-  int32_t state = 0;
-  mAudioChannelAgent->StartPlaying(&state);
-  mAudioChannelAgentPlaying =
-    state == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL;
-  SetCanPlay(mAudioChannelAgentPlaying);
 }
 
 void
 AudioDestinationNode::NotifyStableState()
 {
   mExtraCurrentTimeUpdatedSinceLastStableState = false;
 }
 
@@ -611,11 +646,31 @@ AudioDestinationNode::SetIsOnlyNodeForCo
     ExtraCurrentTime();
     mExtraCurrentTime += mExtraCurrentTimeSinceLastStartedBlocking;
     mExtraCurrentTimeSinceLastStartedBlocking = 0;
     mStream->ChangeExplicitBlockerCount(-1);
     mStartedBlockingDueToBeingOnlyNode = TimeStamp();
   }
 }
 
+void
+AudioDestinationNode::InputMuted(bool aMuted)
+{
+  MOZ_ASSERT(Context() && !Context()->IsOffline());
+
+  if (!mAudioChannelAgent) {
+    return;
+  }
+
+  if (aMuted) {
+    mAudioChannelAgent->StopPlaying();
+    return;
+  }
+
+  int32_t state = 0;
+  mAudioChannelAgent->StartPlaying(&state);
+  mAudioChannelAgentPlaying =
+    state == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL;
+  SetCanPlay(mAudioChannelAgentPlaying);
 }
 
-}
+} // dom namespace
+} // mozilla namespace
--- a/content/media/webaudio/AudioDestinationNode.h
+++ b/content/media/webaudio/AudioDestinationNode.h
@@ -76,16 +76,18 @@ public:
   virtual const char* NodeType() const
   {
     return "AudioDestinationNode";
   }
 
   virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE;
   virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const MOZ_OVERRIDE;
 
+  void InputMuted(bool aInputMuted);
+
 private:
   bool CheckAudioChannelPermissions(AudioChannel aValue);
   void CreateAudioChannelAgent();
 
   void SetCanPlay(bool aCanPlay);
 
   void NotifyStableState();
   void ScheduleStableStateNotification();
--- a/content/media/webaudio/test/browser.ini
+++ b/content/media/webaudio/test/browser.ini
@@ -1,5 +1,7 @@
 [DEFAULT]
 support-files =
   browser_mozAudioChannel.html
+  browser_mozAudioChannel_muted.html
 
 [browser_mozAudioChannel.js]
+[browser_mozAudioChannel_muted.js]
--- a/content/media/webaudio/test/browser_mozAudioChannel.html
+++ b/content/media/webaudio/test/browser_mozAudioChannel.html
@@ -1,31 +1,33 @@
 <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
 <meta charset="utf-8">
 <title>Test for mozinterruptbegin/end in AudioContext</title>
 
-mozAudioChannelTest = <span id="mozAudioChannelTest">FAIL</span>
-
 <script type="application/javascript">
 
   var ac = new AudioContext();
 
+  function createEvent(msg) {
+    var event = document.createEvent('CustomEvent');
+    event.initCustomEvent('testmozchannel', true, true, { msg: msg });
+    dispatchEvent(event);
+  }
+
+  ac.onmozinterruptbegin = function(evt) {
+    createEvent('mozinterruptbegin');
+  }
+
+  ac.addEventListener('mozinterruptend', function() {
+    createEvent('mozinterruptend');
+  }, false);
+
   var buffer = ac.createBuffer(1, 2048, ac.sampleRate);
   for (var i = 0; i < 2048; ++i) {
     buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / ac.sampleRate);
   }
 
   var source = ac.createBufferSource();
   source.buffer = buffer;
   source.connect(ac.destination);
+  source.loop = true;
   source.start(0);
-
-  ac.onmozinterruptbegin = function(evt) {
-    document.getElementById("mozAudioChannelTest").innerHTML = "mozinterruptbegin";
-  }
-
-  ac.addEventListener('mozinterruptend', function() {
-    document.getElementById("mozAudioChannelTest").innerHTML = "mozinterruptend";
-  }, false);
-
-  document.getElementById("mozAudioChannelTest").innerHTML = "READY";
-
 </script>
--- a/content/media/webaudio/test/browser_mozAudioChannel.js
+++ b/content/media/webaudio/test/browser_mozAudioChannel.js
@@ -22,50 +22,65 @@ function whenTabRestored(aTab, aCallback
 
 function whenBrowserUnloaded(aBrowser, aCallback) {
   aBrowser.addEventListener("unload", function onUnload() {
     aBrowser.removeEventListener("unload", onUnload, true);
     executeSoon(aCallback);
   }, true);
 }
 
+var event;
+var next = function() {}
+
+function eventListener(evt) {
+  info("Event has been received!");
+  is(evt.detail.msg, event, "AudioContext has been received the right event: " + event);
+  next();
+}
+
 function test() {
 
   waitForExplicitFinish();
 
   let testURL = "http://mochi.test:8888/browser/" +
     "content/media/webaudio/test/browser_mozAudioChannel.html";
 
   SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", "content" ],
                                      ["media.useAudioChannelService", true ]]},
     function() {
       let tab1 = gBrowser.addTab(testURL);
       gBrowser.selectedTab = tab1;
 
       whenBrowserLoaded(tab1.linkedBrowser, function() {
         let doc = tab1.linkedBrowser.contentDocument;
-        is(doc.getElementById("mozAudioChannelTest").textContent, "READY",
-           "Test is ready to run");
+        tab1.linkedBrowser.contentWindow.addEventListener('testmozchannel', eventListener, false);
 
         SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", "telephony" ]]},
           function() {
-            let tab2 = gBrowser.duplicateTab(tab1);
-            gBrowser.selectedTab = tab2;
-            whenTabRestored(tab2, function() {
-              is(doc.getElementById("mozAudioChannelTest").textContent, "mozinterruptbegin",
-                 "AudioContext has been muted by the second tab.");
-
-              whenBrowserUnloaded(tab2.linkedBrowser, function() {
-                is(doc.getElementById("mozAudioChannelTest").textContent, "mozinterruptend",
-                   "AudioContext has been unmuted.");
+            event = 'mozinterruptbegin';
+            next = function() {
+              info("Next is called.");
+              event = 'mozinterruptend';
+              next =  function() {
+                info("Next is called again.");
+                tab1.linkedBrowser.contentWindow.removeEventListener('testmozchannel', eventListener);
                 gBrowser.removeTab(tab1);
                 finish();
-              });
+              }
+
+              info("Unloading a tab...");
+              whenBrowserUnloaded(tab2.linkedBrowser, function() { info("Tab unloaded."); });
 
               gBrowser.removeTab(tab2);
               gBrowser.selectedTab = tab1;
-            });
+            }
+
+            let tab2 = gBrowser.duplicateTab(tab1);
+            gBrowser.selectedTab = tab2;
+
+            info("Restoring the tab...");
+            whenTabRestored(tab2, function() { info("Tab restored."); });
           }
         );
       });
     }
   );
 }
copy from content/media/webaudio/test/browser_mozAudioChannel.html
copy to content/media/webaudio/test/browser_mozAudioChannel_muted.html
--- a/content/media/webaudio/test/browser_mozAudioChannel.html
+++ b/content/media/webaudio/test/browser_mozAudioChannel_muted.html
@@ -3,26 +3,16 @@
 <title>Test for mozinterruptbegin/end in AudioContext</title>
 
 mozAudioChannelTest = <span id="mozAudioChannelTest">FAIL</span>
 
 <script type="application/javascript">
 
   var ac = new AudioContext();
 
-  var buffer = ac.createBuffer(1, 2048, ac.sampleRate);
-  for (var i = 0; i < 2048; ++i) {
-    buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / ac.sampleRate);
-  }
-
-  var source = ac.createBufferSource();
-  source.buffer = buffer;
-  source.connect(ac.destination);
-  source.start(0);
-
   ac.onmozinterruptbegin = function(evt) {
     document.getElementById("mozAudioChannelTest").innerHTML = "mozinterruptbegin";
   }
 
   ac.addEventListener('mozinterruptend', function() {
     document.getElementById("mozAudioChannelTest").innerHTML = "mozinterruptend";
   }, false);
 
copy from content/media/webaudio/test/browser_mozAudioChannel.js
copy to content/media/webaudio/test/browser_mozAudioChannel_muted.js
--- a/content/media/webaudio/test/browser_mozAudioChannel.js
+++ b/content/media/webaudio/test/browser_mozAudioChannel_muted.js
@@ -27,17 +27,17 @@ function whenBrowserUnloaded(aBrowser, a
   }, true);
 }
 
 function test() {
 
   waitForExplicitFinish();
 
   let testURL = "http://mochi.test:8888/browser/" +
-    "content/media/webaudio/test/browser_mozAudioChannel.html";
+    "content/media/webaudio/test/browser_mozAudioChannel_muted.html";
 
   SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", "content" ],
                                      ["media.useAudioChannelService", true ]]},
     function() {
       let tab1 = gBrowser.addTab(testURL);
       gBrowser.selectedTab = tab1;
 
       whenBrowserLoaded(tab1.linkedBrowser, function() {
@@ -45,22 +45,22 @@ function test() {
         is(doc.getElementById("mozAudioChannelTest").textContent, "READY",
            "Test is ready to run");
 
         SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", "telephony" ]]},
           function() {
             let tab2 = gBrowser.duplicateTab(tab1);
             gBrowser.selectedTab = tab2;
             whenTabRestored(tab2, function() {
-              is(doc.getElementById("mozAudioChannelTest").textContent, "mozinterruptbegin",
-                 "AudioContext has been muted by the second tab.");
+              is(doc.getElementById("mozAudioChannelTest").textContent, "READY",
+                 "AudioContext should not be muted by the second tab.");
 
               whenBrowserUnloaded(tab2.linkedBrowser, function() {
-                is(doc.getElementById("mozAudioChannelTest").textContent, "mozinterruptend",
-                   "AudioContext has been unmuted.");
+                is(doc.getElementById("mozAudioChannelTest").textContent, "READY",
+                   "AudioContext should not be muted by the second tab.");
                 gBrowser.removeTab(tab1);
                 finish();
               });
 
               gBrowser.removeTab(tab2);
               gBrowser.selectedTab = tab1;
             });
           }