Bug 1023175 - AudioContext should have attribute EventHandler onmozinterruptend/begin in the webIDL interface, r=ehsan, r=smaug
authorAndrea Marchesini <amarchesini@mozilla.com>
Fri, 13 Jun 2014 07:06:14 +0100
changeset 210566 adce3575049d6b038c4ddb36d52f55fee62fc89c
parent 210565 2eb937edf1d5815a258730617bd4461d61ffd305
child 210567 193ef9b92584aaef3de20796992080cc579939a0
push id515
push userraliiev@mozilla.com
push dateMon, 06 Oct 2014 12:51:51 +0000
treeherdermozilla-release@267c7a481bef [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersehsan, smaug
bugs1023175
milestone33.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1023175 - AudioContext should have attribute EventHandler onmozinterruptend/begin in the webIDL interface, r=ehsan, r=smaug
content/base/src/nsGkAtomList.h
content/media/webaudio/AudioContext.cpp
content/media/webaudio/AudioContext.h
content/media/webaudio/test/browser.ini
content/media/webaudio/test/browser_mozAudioChannel.html
content/media/webaudio/test/browser_mozAudioChannel.js
content/media/webaudio/test/moz.build
dom/webidl/AudioContext.webidl
--- a/content/base/src/nsGkAtomList.h
+++ b/content/base/src/nsGkAtomList.h
@@ -1812,16 +1812,20 @@ GK_ATOM(ondevicemotion, "ondevicemotion"
 GK_ATOM(ondeviceorientation, "ondeviceorientation")
 GK_ATOM(ondeviceproximity, "ondeviceproximity")
 GK_ATOM(onmozorientationchange, "onmozorientationchange")
 GK_ATOM(onuserproximity, "onuserproximity")
 
 // light sensor support
 GK_ATOM(ondevicelight, "ondevicelight")
 
+// Audio channel events
+GK_ATOM(onmozinterruptbegin, "onmozinterruptbegin")
+GK_ATOM(onmozinterruptend, "onmozinterruptend")
+
 //---------------------------------------------------------------------------
 // Special atoms
 //---------------------------------------------------------------------------
 
 // Node types
 GK_ATOM(cdataTagName, "#cdata-section")
 GK_ATOM(commentTagName, "#comment")
 GK_ATOM(documentNodeName, "#document")
--- a/content/media/webaudio/AudioContext.cpp
+++ b/content/media/webaudio/AudioContext.cpp
@@ -9,16 +9,17 @@
 #include "nsPIDOMWindow.h"
 #include "mozilla/ErrorResult.h"
 #include "mozilla/dom/AnalyserNode.h"
 #include "mozilla/dom/AudioContextBinding.h"
 #include "mozilla/dom/HTMLMediaElement.h"
 #include "mozilla/dom/OfflineAudioContextBinding.h"
 #include "mozilla/dom/OwningNonNull.h"
 #include "MediaStreamGraph.h"
+#include "AudioChannelService.h"
 #include "AudioDestinationNode.h"
 #include "AudioBufferSourceNode.h"
 #include "AudioBuffer.h"
 #include "GainNode.h"
 #include "MediaElementAudioSourceNode.h"
 #include "MediaStreamAudioSourceNode.h"
 #include "DelayNode.h"
 #include "PannerNode.h"
@@ -126,17 +127,19 @@ AudioContext::Constructor(const GlobalOb
                           ErrorResult& aRv)
 {
   nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
   if (!window) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
-  nsRefPtr<AudioContext> object = new AudioContext(window, false);
+  nsRefPtr<AudioContext> object =
+    new AudioContext(window, false,
+                     AudioChannelService::GetDefaultAudioChannel());
 
   RegisterWeakMemoryReporter(object);
 
   return object.forget();
 }
 
 /* static */ already_AddRefed<AudioContext>
 AudioContext::Constructor(const GlobalObject& aGlobal,
--- a/content/media/webaudio/AudioContext.h
+++ b/content/media/webaudio/AudioContext.h
@@ -62,17 +62,17 @@ class ScriptProcessorNode;
 class WaveShaperNode;
 class PeriodicWave;
 
 class AudioContext MOZ_FINAL : public DOMEventTargetHelper,
                                public nsIMemoryReporter
 {
   AudioContext(nsPIDOMWindow* aParentWindow,
                bool aIsOffline,
-               AudioChannel aChannel = AudioChannel::Normal,
+               AudioChannel aChannel,
                uint32_t aNumberOfChannels = 0,
                uint32_t aLength = 0,
                float aSampleRate = 0.0f);
   ~AudioContext();
 
 public:
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioContext,
@@ -226,16 +226,19 @@ public:
 
   void UpdateNodeCount(int32_t aDelta);
 
   double DOMTimeToStreamTime(double aTime) const
   {
     return aTime - ExtraCurrentTime();
   }
 
+  IMPL_EVENT_HANDLER(mozinterruptbegin)
+  IMPL_EVENT_HANDLER(mozinterruptend)
+
 private:
   /**
    * Returns the amount of extra time added to the current time of the
    * AudioDestinationNode's MediaStream to get this AudioContext's currentTime.
    * Must be subtracted from all DOM API parameter times that are on the same
    * timeline as AudioContext's currentTime to get times we can pass to the
    * MediaStreamGraph.
    */
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/test/browser.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+support-files =
+  browser_mozAudioChannel.html
+
+[browser_mozAudioChannel.js]
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/test/browser_mozAudioChannel.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN">
+<meta charset="utf-8">
+<title>Test for mozinterruptbegin/end in AudioContext</title>
+
+mozAudioChannelTest = <span id="mozAudioChannelTest">FAIL</span>
+
+<script type="application/javascript">
+
+  var ac = new AudioContext();
+
+  var buffer = ac.createBuffer(1, 2048, ac.sampleRate);
+  for (var i = 0; i < 2048; ++i) {
+    buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / ac.sampleRate);
+  }
+
+  var source = ac.createBufferSource();
+  source.buffer = buffer;
+  source.connect(ac.destination);
+  source.start(0);
+
+  ac.onmozinterruptbegin = function(evt) {
+    document.getElementById("mozAudioChannelTest").innerHTML = "mozinterruptbegin";
+  }
+
+  ac.addEventListener('mozinterruptend', function() {
+    document.getElementById("mozAudioChannelTest").innerHTML = "mozinterruptend";
+  }, false);
+
+  document.getElementById("mozAudioChannelTest").innerHTML = "READY";
+
+</script>
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/test/browser_mozAudioChannel.js
@@ -0,0 +1,71 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+function whenBrowserLoaded(aBrowser, aCallback) {
+  aBrowser.addEventListener("load", function onLoad(event) {
+    if (event.target == aBrowser.contentDocument) {
+      aBrowser.removeEventListener("load", onLoad, true);
+      executeSoon(aCallback);
+    }
+  }, true);
+}
+
+function whenTabRestored(aTab, aCallback) {
+  aTab.addEventListener("SSTabRestored", function onRestored(aEvent) {
+    aTab.removeEventListener("SSTabRestored", onRestored, true);
+    executeSoon(function executeWhenTabRestored() {
+      aCallback();
+    });
+  }, true);
+}
+
+function whenBrowserUnloaded(aBrowser, aCallback) {
+  aBrowser.addEventListener("unload", function onUnload() {
+    aBrowser.removeEventListener("unload", onUnload, true);
+    executeSoon(aCallback);
+  }, true);
+}
+
+function test() {
+
+  waitForExplicitFinish();
+
+  let testURL = "http://mochi.test:8888/browser/" +
+    "content/media/webaudio/test/browser_mozAudioChannel.html";
+
+  SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", "content" ],
+                                     ["media.useAudioChannelService", true ]]},
+    function() {
+      let tab1 = gBrowser.addTab(testURL);
+      gBrowser.selectedTab = tab1;
+
+      whenBrowserLoaded(tab1.linkedBrowser, function() {
+        let doc = tab1.linkedBrowser.contentDocument;
+        is(doc.getElementById("mozAudioChannelTest").textContent, "READY",
+           "Test is ready to run");
+
+        SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", "telephony" ]]},
+          function() {
+            let tab2 = gBrowser.duplicateTab(tab1);
+            gBrowser.selectedTab = tab2;
+            whenTabRestored(tab2, function() {
+              is(doc.getElementById("mozAudioChannelTest").textContent, "mozinterruptbegin",
+                 "AudioContext has been muted by the second tab.");
+
+              whenBrowserUnloaded(tab2.linkedBrowser, function() {
+                is(doc.getElementById("mozAudioChannelTest").textContent, "mozinterruptend",
+                   "AudioContext has been unmuted.");
+                gBrowser.removeTab(tab1);
+                finish();
+              });
+
+              gBrowser.removeTab(tab2);
+              gBrowser.selectedTab = tab1;
+            });
+          }
+        );
+      });
+    }
+  );
+}
--- a/content/media/webaudio/test/moz.build
+++ b/content/media/webaudio/test/moz.build
@@ -7,8 +7,12 @@
 MOCHITEST_MANIFESTS += [
     'blink/mochitest.ini',
     'mochitest.ini',
 ]
 
 MOCHITEST_CHROME_MANIFESTS += [
     'chrome.ini'
 ]
+
+BROWSER_CHROME_MANIFESTS += [
+    'browser.ini'
+]
--- a/dom/webidl/AudioContext.webidl
+++ b/dom/webidl/AudioContext.webidl
@@ -76,8 +76,19 @@ interface AudioContext : EventTarget {
 };
 
 // Mozilla extensions
 partial interface AudioContext {
   // Read AudioChannel.webidl for more information about this attribute.
   [Pref="media.useAudioChannelService", SetterThrows]
   attribute AudioChannel mozAudioChannelType;
 };
+
+partial interface AudioContext {
+  // These 2 events are dispatched when the AudioContext object is muted by
+  // the AudioChannelService. It's called 'interrupt' because when this event is
+  // dispatched on an HTMLMediaElement, the audio stream is paused.
+  [Pref="media.useAudioChannelService"]
+  attribute EventHandler onmozinterruptbegin;
+
+  [Pref="media.useAudioChannelService"]
+  attribute EventHandler onmozinterruptend;
+};