Bug 916384 - Stop calling onaudioprocess on the ScriptProcessorNode if it has no inputs or outputs. r=roc
author Paul Adenot <paul@paul.cx>
Thu, 12 Dec 2013 15:31:51 +0100
changeset 160242 55e6046395260e2cf449a7c628bea2a482972cb5
parent 160241 bc103864507b1eb7172e7abc94a74116d123fc0f
child 160243 30e13828609f08f780a2da933541919ebf3bc20f
push id 25827
push user kwierso@gmail.com
push date Fri, 13 Dec 2013 03:13:04 +0000
treeherder mozilla-central@1bc33fa19b24
reviewers roc
bugs 916384
milestone 29.0a1
Bug 916384 - Stop calling onaudioprocess on the ScriptProcessorNode if it has no inputs or outputs. r=roc Quoting the spec: "audioprocess events are only dispatched if the ScriptProcessorNode has at least one input or one output connected".
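
For illustration, here is a minimal sketch of the behavior this patch implements, seen from a page script rather than from the Gecko internals below (assumptions: a browser AudioContext is available, and the variable names are hypothetical):

// Sketch of the spec rule quoted above, not of the Gecko-side check.
var context = new AudioContext();
var processor = context.createScriptProcessor(2048, 2, 2);
processor.onaudioprocess = function (event) {
  // After this patch, this handler does not run while the node has
  // no input and no output connections.
  console.log("audioprocess fired");
};
// Connecting either side (an input or an output) is enough for
// audioprocess events to be dispatched again, for example:
// processor.connect(context.destination);
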
content/media/MediaStreamGraph.h
content/media/webaudio/ScriptProcessorNode.cpp
content/media/webaudio/test/mochitest.ini
content/media/webaudio/test/test_scriptProcessorNodeNotConnected.html
content/media/webaudio/test/test_scriptProcessorNodeZeroInputOutput.html
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -440,16 +440,20 @@ public:
   void AddConsumer(MediaInputPort* aPort)
   {
     mConsumers.AppendElement(aPort);
   }
   void RemoveConsumer(MediaInputPort* aPort)
   {
     mConsumers.RemoveElement(aPort);
   }
+  uint32_t ConsumerCount()
+  {
+    return mConsumers.Length();
+  }
   const StreamBuffer& GetStreamBuffer() { return mBuffer; }
   GraphTime GetStreamBufferStartTime() { return mBufferStartTime; }
   /**
    * Convert graph time to stream time. aTime must be <= mStateComputedTime
    * to ensure we know exactly how much time this stream will be blocked during
    * the interval.
    */
   StreamTime GraphTimeToStreamTime(GraphTime aTime);
@@ -939,16 +943,20 @@ public:
   virtual void RemoveInput(MediaInputPort* aPort)
   {
     mInputs.RemoveElement(aPort);
   }
   bool HasInputPort(MediaInputPort* aPort)
   {
     return mInputs.Contains(aPort);
   }
+  uint32_t InputPortCount()
+  {
+    return mInputs.Length();
+  }
   virtual void DestroyImpl();
   /**
    * This gets called after we've computed the blocking states for all
    * streams (mBlocked is up to date up to mStateComputedTime).
    * Also, we've produced output for all streams up to this one. If this stream
    * is not in a cycle, then all its source streams have produced data.
    * Generate output up to mStateComputedTime.
    * This is called only on streams that have not finished.
--- a/content/media/webaudio/ScriptProcessorNode.cpp
+++ b/content/media/webaudio/ScriptProcessorNode.cpp
@@ -62,16 +62,23 @@ private:
       mMutex.AssertCurrentThreadOwns();
       MOZ_ASSERT(!NS_IsMainThread());
       MOZ_ASSERT(ReadyToConsume() > 0);
       AudioChunk front = mBufferList.front();
       mBufferList.pop_front();
       return front;
     }
 
+    // Empties the buffer queue.
+    void Clear()
+    {
+      mMutex.AssertCurrentThreadOwns();
+      mBufferList.clear();
+    }
+
   private:
     typedef std::deque<AudioChunk> BufferList;
 
     // Synchronizes access to mBufferList.  Note that it's the responsibility
     // of the callers to perform the required locking, and we assert that every
     // time we access mBufferList.
     Mutex mMutex;
     // The list representing the queue.
@@ -162,16 +169,28 @@ public:
   }
 
   TrackTicks DelaySoFar() const
   {
     MOZ_ASSERT(!NS_IsMainThread());
     return mDelaySoFar == TRACK_TICKS_MAX ? 0 : mDelaySoFar;
   }
 
+  void Reset()
+  {
+    MOZ_ASSERT(!NS_IsMainThread());
+    mDelaySoFar = TRACK_TICKS_MAX;
+    mLatency = 0.0f;
+    {
+      MutexAutoLock lock(mOutputQueue.Lock());
+      mOutputQueue.Clear();
+    }
+    mLastEventTime = TimeStamp();
+  }
+
 private:
   OutputQueue mOutputQueue;
   // How much delay we've seen so far.  This measures the amount of delay
   // caused by the main thread lagging behind in producing output buffers.
   // TRACK_TICKS_MAX means that we have not received our first buffer yet.
   TrackTicks mDelaySoFar;
   // The samplerate of the context.
   float mSampleRate;
@@ -219,16 +238,28 @@ public:
     MutexAutoLock lock(NodeMutex());
 
     // If our node is dead, just output silence.
     if (!Node()) {
       aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
       return;
     }
 
+    // This node is not connected to anything. Per spec, we don't fire the
+    // onaudioprocess event. We also want to clear out the input and output
+    // buffer queue, and output a null buffer.
+    if (!(aStream->ConsumerCount() ||
+          aStream->AsProcessedStream()->InputPortCount())) {
+      aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
+      mSharedBuffers->Reset();
+      mSeenNonSilenceInput = false;
+      mInputWriteIndex = 0;
+      return;
+    }
+
     // First, record our input buffer
     for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
       if (aInput.IsNull()) {
         PodZero(mInputChannels[i] + mInputWriteIndex,
                 aInput.GetDuration());
       } else {
         mSeenNonSilenceInput = true;
         MOZ_ASSERT(aInput.GetDuration() == WEBAUDIO_BLOCK_SIZE, "sanity check");
--- a/content/media/webaudio/test/mochitest.ini
+++ b/content/media/webaudio/test/mochitest.ini
@@ -103,15 +103,16 @@ support-files =
 [test_pannerNodeAbove.html]
 [test_pannerNodeChannelCount.html]
 [test_pannerNodeTail.html]
 [test_pannerNode_equalPower.html]
 [test_periodicWave.html]
 [test_scriptProcessorNode.html]
 [test_scriptProcessorNodeChannelCount.html]
 [test_scriptProcessorNodeZeroInputOutput.html]
+[test_scriptProcessorNodeNotConnected.html]
 [test_singleSourceDest.html]
 [test_waveShaper.html]
 [test_waveShaperNoCurve.html]
 [test_waveShaperZeroLengthCurve.html]
 [test_audioDestinationNode.html]
 [test_mozaudiochannel.html]
 [test_waveDecoder.html]
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/test/test_scriptProcessorNodeNotConnected.html
@@ -0,0 +1,33 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>Test ScriptProcessorNode: should not fire audioprocess if not connected.</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <script type="text/javascript" src="webaudio.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+SimpleTest.waitForExplicitFinish();
+addLoadEvent(function() {
+  var context = new AudioContext();
+
+  var sp = context.createScriptProcessor(2048, 2, 2);
+  sp.onaudioprocess = function(e) {
+    ok(false, "Should not call onaudioprocess if the node is not connected.");
+    sp.onaudioprocess = null;
+    SimpleTest.finish();
+  };
+  setTimeout(function() {
+    console.log(sp.onaudioprocess);
+    if (sp.onaudioprocess) {
+      ok(true, "onaudioprocess not fired.");
+      SimpleTest.finish();
+    }
+  }, 4000);
+});
+</script>
+</pre>
+</body>
+</html>
--- a/content/media/webaudio/test/test_scriptProcessorNodeZeroInputOutput.html
+++ b/content/media/webaudio/test/test_scriptProcessorNodeZeroInputOutput.html
@@ -23,15 +23,17 @@ addLoadEvent(function() {
     sp = context.createScriptProcessor(2048, 2, 0);
     sp.onaudioprocess = function(e) {
       is(e.inputBuffer.numberOfChannels, 2, "Should have 2 input channels");
       is(e.outputBuffer.numberOfChannels, 0, "Should have 0 output channels");
       sp.onaudioprocess = null;
 
       SimpleTest.finish();
     };
+    sp.connect(context.destination);
   };
+  sp.connect(context.destination);
 });
 
 </script>
 </pre>
 </body>
 </html>
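
As a usage note on the last test change: under the new behavior, even a ScriptProcessorNode created with zero output channels has to be connected on at least one side before its audioprocess handler runs, which is why the test now wires each node to the destination. A minimal sketch of that pattern, mirroring the test above (assumption: run in a page with Web Audio support):

var context = new AudioContext();
// Two input channels, zero output channels.
var sp = context.createScriptProcessor(2048, 2, 0);
sp.onaudioprocess = function (e) {
  // e.outputBuffer has 0 channels here; only the input is inspected.
  console.log("input channels: " + e.inputBuffer.numberOfChannels);
};
// Without this connection the handler would never be invoked after this patch.
sp.connect(context.destination);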