Bug 1375119 - Consider a page active if it has running AudioContexts. r=ehsan, a=jcristau
author Paul Adenot <paul@paul.cx>
Mon, 24 Jul 2017 11:17:14 +0200
changeset 414454 9c54a60123e946f75ba00610130ee0dc63f3d39d
parent 414453 b49e5e19d10fa3f245dbf905f6509aefb7ed6ea5
child 414455 4aeee354238927037b46f81b668e96cd7acb11ec
push id1490
push user mtabara@mozilla.com
push date Mon, 31 Jul 2017 14:08:16 +0000
treeherder mozilla-release@70e32e6bf15e [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers ehsan, jcristau
bugs 1375119
milestone 55.0
Bug 1375119 - Consider a page active if it has running AudioContexts. r=ehsan, a=jcristau MozReview-Commit-ID: IOQ2DY9LoTw
dom/base/nsGlobalWindow.cpp
dom/base/test/browser.ini
dom/base/test/browser_timeout_throttling_with_audio_playback.js
dom/base/test/file_webaudioLoop.html
dom/base/test/file_webaudio_startstop.html
dom/base/test/mochitest.ini
dom/media/webaudio/AudioContext.cpp
dom/media/webaudio/AudioContext.h
--- a/dom/base/nsGlobalWindow.cpp
+++ b/dom/base/nsGlobalWindow.cpp
@@ -4372,16 +4372,21 @@ void
 nsPIDOMWindowInner::SyncStateFromParentWindow()
 {
   nsGlobalWindow::Cast(this)->SyncStateFromParentWindow();
 }
 
 bool
 nsPIDOMWindowInner::IsPlayingAudio()
 {
+  for (uint32_t i = 0; i < mAudioContexts.Length(); i++) {
+    if (mAudioContexts[i]->IsRunning()) {
+      return true;
+    }
+  }
   RefPtr<AudioChannelService> acs = AudioChannelService::Get();
   if (!acs) {
     return false;
   }
   auto outer = GetOuterWindow();
   if (!outer) {
     // We've been unlinked and are about to die.  Not a good time to pretend to
     // be playing audio.
--- a/dom/base/test/browser.ini
+++ b/dom/base/test/browser.ini
@@ -16,16 +16,17 @@ support-files =
   file_use_counter_outer.html
   file_use_counter_svg_getElementById.svg
   file_use_counter_svg_currentScale.svg
   file_use_counter_svg_fill_pattern_definition.svg
   file_use_counter_svg_fill_pattern.svg
   file_use_counter_svg_fill_pattern_internal.svg
   file_use_counter_svg_fill_pattern_data.svg
   file_webaudioLoop.html
+  file_webaudio_startstop.html
   plugin.js
 
 [browser_bug593387.js]
 [browser_bug902350.js]
 tags = mcb
 [browser_bug1011748.js]
 [browser_bug1058164.js]
 [browser_force_process_selector.js]
--- a/dom/base/test/browser_timeout_throttling_with_audio_playback.js
+++ b/dom/base/test/browser_timeout_throttling_with_audio_playback.js
@@ -1,15 +1,15 @@
 const kBaseURI = "http://mochi.test:8888/browser/dom/base/test/empty.html";
 const kPluginJS = "chrome://mochitests/content/browser/dom/base/test/plugin.js";
 var testURLs = [
   "http://mochi.test:8888/browser/dom/base/test/file_audioLoop.html",
   "http://mochi.test:8888/browser/dom/base/test/file_audioLoopInIframe.html",
   "http://mochi.test:8888/browser/dom/base/test/file_pluginAudio.html",
-  "http://mochi.test:8888/browser/dom/base/test/file_webaudioLoop.html",
+  "http://mochi.test:8888/browser/dom/base/test/file_webaudio_startstop.html",
 ];
 
 // We want to ensure that while audio is being played back, a background tab is
 // treated the same as a foreground tab as far as timeout throttling is concerned.
 // So we use a 10ms minimum timeout value for foreground tabs and a 100,000 second
 // minimum timeout value for background tabs.  This means that in case the test
 // fails, it will time out in practice, but just for sanity the test condition
 // ensures that the observed timeout delay falls in this range.
--- a/dom/base/test/file_webaudioLoop.html
+++ b/dom/base/test/file_webaudioLoop.html
@@ -3,29 +3,20 @@
 var ac = new AudioContext();
 var runningPromise = new Promise(resolve => {
   ac.onstatechange = event => {
     if (ac.state == "running") {
       resolve();
     }
   };
 });
-fetch("audio.ogg").then(response => {
-  return response.arrayBuffer();
-}).then(ab => {
-  return ac.decodeAudioData(ab);
-}).then(ab => {
-  var src = ac.createBufferSource();
-  src.buffer = ab;
-  src.loop = true;
-  src.loopStart = 0;
-  src.loopEnd = ab.duration;
-  src.start();
-  src.connect(ac.destination);
-});
+
+var osc = ac.createOscillator();
+osc.connect(ac.destination);
+osc.start(0);
 
 var suspendPromise;
 function suspendAC() {
   runningPromise.then(() => {
     suspendPromise = ac.suspend();
   });
 }
 
new file mode 100644
--- /dev/null
+++ b/dom/base/test/file_webaudio_startstop.html
@@ -0,0 +1,36 @@
+<!DOCTYPE html>
+<script>
+var ac = new AudioContext();
+var runningPromise = new Promise(resolve => {
+  ac.onstatechange = event => {
+    if (ac.state == "running") {
+      resolve();
+    }
+  };
+});
+
+var osc = ac.createOscillator();
+osc.connect(ac.destination);
+osc.start(0);
+osc.stop(osc.context.currentTime + 2.0);
+
+var suspendPromise;
+function suspendAC() {
+  runningPromise.then(() => {
+    suspendPromise = ac.suspend();
+  });
+}
+
+var resumePromise;
+function resumeAC() {
+  suspendPromise.then(() => {
+    resumePromise = ac.resume();
+  });
+}
+
+function closeAC() {
+  resumePromise.then(() => {
+    ac.close();
+  });
+}
+</script>
--- a/dom/base/test/mochitest.ini
+++ b/dom/base/test/mochitest.ini
@@ -203,16 +203,17 @@ support-files =
   viewport_helpers.js
   w3element_traversal.svg
   wholeTexty-helper.xml
   referrerHelper.js
   img_referrer_testserver.sjs
   file_audioLoop.html
   file_webaudioLoop.html
   file_webaudioLoop2.html
+  file_webaudio_startstop.html
   file_pluginAudio.html
   file_pluginAudioNonAutoStart.html
   noaudio.webm
   referrer_helper.js
   referrer_testserver.sjs
   script_postmessages_fileList.js
   iframe_postMessages.html
   test_anonymousContent_style_csp.html^headers^
--- a/dom/media/webaudio/AudioContext.cpp
+++ b/dom/media/webaudio/AudioContext.cpp
@@ -487,16 +487,22 @@ AudioListener*
 AudioContext::Listener()
 {
   if (!mListener) {
     mListener = new AudioListener(this);
   }
   return mListener;
 }
 
+bool
+AudioContext::IsRunning() const
+{
+  return mAudioContextState == AudioContextState::Running;
+}
+
 already_AddRefed<Promise>
 AudioContext::DecodeAudioData(const ArrayBuffer& aBuffer,
                               const Optional<OwningNonNull<DecodeSuccessCallback> >& aSuccessCallback,
                               const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback,
                               ErrorResult& aRv)
 {
   nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(GetParentObject());
   RefPtr<Promise> promise;
--- a/dom/media/webaudio/AudioContext.h
+++ b/dom/media/webaudio/AudioContext.h
@@ -176,16 +176,17 @@ public:
 
   bool ShouldSuspendNewStream() const { return mSuspendCalled; }
 
   double CurrentTime() const;
 
   AudioListener* Listener();
 
   AudioContextState State() const { return mAudioContextState; }
+  bool IsRunning() const;
 
   // Those three methods return a promise to content, that is resolved when an
   // (possibly long) operation is completed on the MSG (and possibly other)
   // thread(s). To avoid having to match the calls and asychronous result when
   // the operation is completed, we keep a reference to the promises on the main
   // thread, and then send the promises pointers down the MSG thread, as a void*
   // (to make it very clear that the pointer is to merely be treated as an ID).
   // When back on the main thread, we can resolve or reject the promise, by