Bug 856361. Part 5: Implement MediaStreamAudioSourceNode. r=ehsan, a=webaudio
author: secretrobotron <secretrobotron@gmail.com>
Wed, 24 Jul 2013 23:29:39 +1200
changeset 153846 a65f0cf63a0570ac5474715b341bb9c7a0a3d37a
parent 153845 29afb36824078849197f78e5dbe9fab3561ff5ff
child 153847 f7fa762c48afed42851b10a3791cdaa845de9fbf
push id: 2859
push user: akeybl@mozilla.com
push date: Mon, 16 Sep 2013 19:14:59 +0000
treeherder: mozilla-beta@87d3c51cd2bf [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: ehsan, webaudio
bugs: 856361
milestone: 25.0a2
Bug 856361. Part 5: Implement MediaStreamAudioSourceNode. r=ehsan, a=webaudio
content/media/MediaStreamGraph.cpp
content/media/MediaStreamGraph.h
content/media/moz.build
content/media/webaudio/AudioContext.cpp
content/media/webaudio/AudioContext.h
content/media/webaudio/MediaStreamAudioSourceNode.cpp
content/media/webaudio/MediaStreamAudioSourceNode.h
content/media/webaudio/moz.build
content/media/webaudio/test/Makefile.in
content/media/webaudio/test/test_mediaStreamAudioSourceNode.html
content/media/webaudio/test/test_mediaStreamAudioSourceNodeCrossOrigin.html
content/media/webaudio/test/test_mediaStreamAudioSourceNodeResampling.html
content/media/webaudio/test/webaudio.js
dom/webidl/AudioContext.webidl
dom/webidl/MediaStreamAudioSourceNode.webidl
dom/webidl/WebIDL.mk
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -17,16 +17,17 @@
 #include "prlog.h"
 #include "VideoUtils.h"
 #include "mozilla/Attributes.h"
 #include "TrackUnionStream.h"
 #include "ImageContainer.h"
 #include "AudioChannelCommon.h"
 #include "AudioNodeEngine.h"
 #include "AudioNodeStream.h"
+#include "AudioNodeExternalInputStream.h"
 #include <algorithm>
 #include "DOMMediaStream.h"
 #include "GeckoProfiler.h"
 
 using namespace mozilla::layers;
 using namespace mozilla::dom;
 
 namespace mozilla {
@@ -2311,16 +2312,31 @@ MediaStreamGraph::CreateTrackUnionStream
   TrackUnionStream* stream = new TrackUnionStream(aWrapper);
   NS_ADDREF(stream);
   MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
   stream->SetGraphImpl(graph);
   graph->AppendMessage(new CreateMessage(stream));
   return stream;
 }
 
+AudioNodeExternalInputStream*
+MediaStreamGraph::CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  if (!aSampleRate) {
+    aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
+  }
+  AudioNodeExternalInputStream* stream = new AudioNodeExternalInputStream(aEngine, aSampleRate);
+  NS_ADDREF(stream);
+  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
+  stream->SetGraphImpl(graph);
+  graph->AppendMessage(new CreateMessage(stream));
+  return stream;
+}
+
 AudioNodeStream*
 MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine,
                                         AudioNodeStreamKind aKind,
                                         TrackRate aSampleRate)
 {
   MOZ_ASSERT(NS_IsMainThread());
   if (!aSampleRate) {
     aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -186,18 +186,19 @@ class MainThreadMediaStreamListener {
 public:
   virtual void NotifyMainThreadStateChanged() = 0;
 };
 
 class MediaStreamGraphImpl;
 class SourceMediaStream;
 class ProcessedMediaStream;
 class MediaInputPort;
+class AudioNodeEngine;
+class AudioNodeExternalInputStream;
 class AudioNodeStream;
-class AudioNodeEngine;
 struct AudioChunk;
 
 /**
  * A stream of synchronized audio and video data. All (not blocked) streams
  * progress at the same rate --- "real time". Streams cannot seek. The only
  * operation readers can perform on a stream is to read the next data.
  *
  * Consumers of a stream can be reading from it at different offsets, but that
@@ -355,16 +356,17 @@ public:
   bool IsDestroyed()
   {
     NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
     return mMainThreadDestroyed;
   }
 
   friend class MediaStreamGraphImpl;
   friend class MediaInputPort;
+  friend class AudioNodeExternalInputStream;
 
   virtual SourceMediaStream* AsSourceStream() { return nullptr; }
   virtual ProcessedMediaStream* AsProcessedStream() { return nullptr; }
   virtual AudioNodeStream* AsAudioNodeStream() { return nullptr; }
 
   // media graph thread only
   void Init();
   // These Impl methods perform the core functionality of the control methods
@@ -963,16 +965,21 @@ public:
    * Create a stream that will process audio for an AudioNode.
    * Takes ownership of aEngine.  aSampleRate is the sampling rate used
    * for the stream.  If 0 is passed, the sampling rate of the engine's
    * node will get used.
    */
   AudioNodeStream* CreateAudioNodeStream(AudioNodeEngine* aEngine,
                                          AudioNodeStreamKind aKind,
                                          TrackRate aSampleRate = 0);
+
+  AudioNodeExternalInputStream*
+  CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine,
+                                     TrackRate aSampleRate = 0);
+
   /**
    * Returns the number of graph updates sent. This can be used to track
    * whether a given update has been processed by the graph thread and reflected
    * in main-thread stream state.
    */
   int64_t GetCurrentGraphUpdateIndex() { return mGraphUpdatesSent; }
   /**
    * Start processing non-realtime for a specific number of ticks.
--- a/content/media/moz.build
+++ b/content/media/moz.build
@@ -48,16 +48,17 @@ TEST_DIRS += ['test']
 MODULE = 'content'
 
 EXPORTS += [
     'AbstractMediaDecoder.h',
     'AudioAvailableEventManager.h',
     'AudioChannelFormat.h',
     'AudioEventTimeline.h',
     'AudioNodeEngine.h',
+    'AudioNodeExternalInputStream.h',
     'AudioNodeStream.h',
     'AudioSampleFormat.h',
     'AudioSegment.h',
     'AudioStream.h',
     'BufferMediaResource.h',
     'DOMMediaStream.h',
     'DecoderTraits.h',
     'EncodedBufferCache.h',
@@ -92,16 +93,17 @@ EXPORTS.mozilla.dom += [
     'VideoPlaybackQuality.h',
     'VideoStreamTrack.h',
 ]
 
 CPP_SOURCES += [
     'AudioAvailableEventManager.cpp',
     'AudioChannelFormat.cpp',
     'AudioNodeEngine.cpp',
+    'AudioNodeExternalInputStream.cpp',
     'AudioNodeStream.cpp',
     'AudioSegment.cpp',
     'AudioStream.cpp',
     'AudioStreamTrack.cpp',
     'DOMMediaStream.cpp',
     'DecoderTraits.cpp',
     'EncodedBufferCache.cpp',
     'FileBlockCache.cpp',
--- a/content/media/webaudio/AudioContext.cpp
+++ b/content/media/webaudio/AudioContext.cpp
@@ -11,16 +11,17 @@
 #include "mozilla/dom/AudioContextBinding.h"
 #include "mozilla/dom/OfflineAudioContextBinding.h"
 #include "MediaStreamGraph.h"
 #include "mozilla/dom/AnalyserNode.h"
 #include "AudioDestinationNode.h"
 #include "AudioBufferSourceNode.h"
 #include "AudioBuffer.h"
 #include "GainNode.h"
+#include "MediaStreamAudioSourceNode.h"
 #include "DelayNode.h"
 #include "PannerNode.h"
 #include "AudioListener.h"
 #include "DynamicsCompressorNode.h"
 #include "BiquadFilterNode.h"
 #include "ScriptProcessorNode.h"
 #include "ChannelMergerNode.h"
 #include "ChannelSplitterNode.h"
@@ -250,16 +251,28 @@ AudioContext::CreateScriptProcessor(uint
 
 already_AddRefed<AnalyserNode>
 AudioContext::CreateAnalyser()
 {
   nsRefPtr<AnalyserNode> analyserNode = new AnalyserNode(this);
   return analyserNode.forget();
 }
 
+already_AddRefed<MediaStreamAudioSourceNode>
+AudioContext::CreateMediaStreamSource(const DOMMediaStream& aMediaStream,
+                                      ErrorResult& aRv)
+{
+  if (mIsOffline) {
+    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
+    return nullptr;
+  }
+  nsRefPtr<MediaStreamAudioSourceNode> mediaStreamAudioSourceNode = new MediaStreamAudioSourceNode(this, &aMediaStream);
+  return mediaStreamAudioSourceNode.forget();
+}
+
 already_AddRefed<GainNode>
 AudioContext::CreateGain()
 {
   nsRefPtr<GainNode> gainNode = new GainNode(this);
   return gainNode.forget();
 }
 
 already_AddRefed<WaveShaperNode>
--- a/content/media/webaudio/AudioContext.h
+++ b/content/media/webaudio/AudioContext.h
@@ -158,16 +158,19 @@ public:
   CreateWaveShaper();
 
   already_AddRefed<GainNode>
   CreateGainNode()
   {
     return CreateGain();
   }
 
+  already_AddRefed<MediaStreamAudioSourceNode>
+  CreateMediaStreamSource(const DOMMediaStream& aMediaStream, ErrorResult& aRv);
+
   already_AddRefed<DelayNode>
   CreateDelay(double aMaxDelayTime, ErrorResult& aRv);
 
   already_AddRefed<DelayNode>
   CreateDelayNode(double aMaxDelayTime, ErrorResult& aRv)
   {
     return CreateDelay(aMaxDelayTime, aRv);
   }
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/MediaStreamAudioSourceNode.cpp
@@ -0,0 +1,48 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaStreamAudioSourceNode.h"
+#include "mozilla/dom/MediaStreamAudioSourceNodeBinding.h"
+#include "AudioNodeEngine.h"
+#include "AudioNodeExternalInputStream.h"
+#include "DOMMediaStream.h"
+
+namespace mozilla {
+namespace dom {
+
+MediaStreamAudioSourceNode::MediaStreamAudioSourceNode(AudioContext* aContext,
+                                                       const DOMMediaStream* aMediaStream)
+  : AudioNode(aContext,
+              2,
+              ChannelCountMode::Max,
+              ChannelInterpretation::Speakers)
+{
+  AudioNodeEngine* engine = new AudioNodeEngine(this);
+  mStream = aContext->Graph()->CreateAudioNodeExternalInputStream(engine);
+  ProcessedMediaStream* outputStream = static_cast<ProcessedMediaStream*>(mStream.get());
+  mInputPort = outputStream->AllocateInputPort(aMediaStream->GetStream(),
+                                               MediaInputPort::FLAG_BLOCK_INPUT);
+}
+
+void
+MediaStreamAudioSourceNode::DestroyMediaStream()
+{
+  if (mInputPort) {
+    mInputPort->Destroy();
+    mInputPort = nullptr;
+  }
+  AudioNode::DestroyMediaStream();
+}
+
+JSObject*
+MediaStreamAudioSourceNode::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aScope)
+{
+  return MediaStreamAudioSourceNodeBinding::Wrap(aCx, aScope, this);
+}
+
+}
+}
+
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/MediaStreamAudioSourceNode.h
@@ -0,0 +1,34 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MediaStreamAudioSourceNode_h_
+#define MediaStreamAudioSourceNode_h_
+
+#include "AudioNode.h"
+
+namespace mozilla {
+namespace dom {
+
+class MediaStreamAudioSourceNode : public AudioNode
+{
+public:
+  MediaStreamAudioSourceNode(AudioContext* aContext, const DOMMediaStream* aMediaStream);
+
+  virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aScope) MOZ_OVERRIDE;
+
+  virtual void DestroyMediaStream() MOZ_OVERRIDE;
+
+  virtual uint16_t NumberOfInputs() const MOZ_OVERRIDE { return 0; }
+
+private:
+  nsRefPtr<MediaInputPort> mInputPort;
+};
+
+}
+}
+
+#endif
+
--- a/content/media/webaudio/moz.build
+++ b/content/media/webaudio/moz.build
@@ -35,16 +35,17 @@ EXPORTS.mozilla.dom += [
     'ChannelMergerNode.h',
     'ChannelSplitterNode.h',
     'ConvolverNode.h',
     'DelayNode.h',
     'DynamicsCompressorNode.h',
     'EnableWebAudioCheck.h',
     'GainNode.h',
     'MediaStreamAudioDestinationNode.h',
+    'MediaStreamAudioSourceNode.h',
     'OfflineAudioCompletionEvent.h',
     'OscillatorNode.h',
     'PannerNode.h',
     'PeriodicWave.h',
     'ScriptProcessorNode.h',
     'WaveShaperNode.h',
 ]
 
@@ -65,16 +66,17 @@ CPP_SOURCES += [
     'DelayNode.cpp',
     'DelayProcessor.cpp',
     'DynamicsCompressorNode.cpp',
     'EnableWebAudioCheck.cpp',
     'FFTBlock.cpp',
     'GainNode.cpp',
     'MediaBufferDecoder.cpp',
     'MediaStreamAudioDestinationNode.cpp',
+    'MediaStreamAudioSourceNode.cpp',
     'OfflineAudioCompletionEvent.cpp',
     'OscillatorNode.cpp',
     'PannerNode.cpp',
     'PeriodicWave.cpp',
     'ScriptProcessorNode.cpp',
     'ThreeDPoint.cpp',
     'WaveShaperNode.cpp',
     'WebAudioUtils.cpp',
--- a/content/media/webaudio/test/Makefile.in
+++ b/content/media/webaudio/test/Makefile.in
@@ -63,16 +63,19 @@ MOCHITEST_FILES := \
   test_delayNodeSmallMaxDelay.html \
   test_delayNodeWithGain.html \
   test_dynamicsCompressorNode.html \
   test_gainNode.html \
   test_gainNodeInLoop.html \
   test_maxChannelCount.html \
   test_mediaDecoding.html \
   test_mediaStreamAudioDestinationNode.html \
+  test_mediaStreamAudioSourceNode.html \
+  test_mediaStreamAudioSourceNodeCrossOrigin.html \
+  test_mediaStreamAudioSourceNodeResampling.html \
   test_mixingRules.html \
   test_nodeToParamConnection.html \
   test_OfflineAudioContext.html \
   test_offlineDestinationChannelCountLess.html \
   test_offlineDestinationChannelCountMore.html \
   test_oscillatorNode.html \
   test_pannerNode.html \
   test_pannerNode_equalPower.html \
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/test/test_mediaStreamAudioSourceNode.html
@@ -0,0 +1,44 @@
+<!DOCTYPE HTML>
+<html>
+<meta charset="utf-8">
+<head>
+  <title>Test MediaStreamAudioSourceNode processing is correct</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <script type="text/javascript" src="webaudio.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+var gTest = {
+  length: 2048,
+  skipOfflineContextTests: true,
+  createGraph: function(context) {
+    var sourceGraph = new AudioContext();
+    var source = sourceGraph.createBufferSource();
+    source.buffer = this.buffer;
+    var dest = sourceGraph.createMediaStreamDestination();
+    source.connect(dest);
+    source.start(0);
+
+    var mediaStreamSource = context.createMediaStreamSource(dest.stream);
+    return mediaStreamSource;
+  },
+  createExpectedBuffers: function(context) {
+    var buffer = context.createBuffer(2, 2048, context.sampleRate);
+    for (var i = 0; i < 2048; ++i) {
+      buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / context.sampleRate);
+      buffer.getChannelData(1)[i] = buffer.getChannelData(0)[i];
+    }
+    this.buffer = buffer;
+    return buffer;
+  },
+};
+
+runTest();
+
+</script>
+</pre>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/test/test_mediaStreamAudioSourceNodeCrossOrigin.html
@@ -0,0 +1,57 @@
+<!DOCTYPE HTML>
+<html>
+<meta charset="utf-8">
+<head>
+  <title>Test MediaStreamAudioSourceNode doesn't get data from cross-origin media resources</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+SimpleTest.waitForExplicitFinish();
+
+var audio = new Audio("http://example.org:80/tests/content/media/webaudio/test/small-shot.ogg");
+var context = new AudioContext();
+var node = context.createMediaStreamSource(audio.mozCaptureStreamUntilEnded());
+var sp = context.createScriptProcessor(2048, 1);
+node.connect(sp);
+var nonzeroSampleCount = 0;
+var complete = false;
+var iterationCount = 0;
+
+// This test ensures that no audio data leaks through from the cross-origin
+function processSamples(e) {
+  if (complete) {
+    return;
+  }
+
+  if (iterationCount == 0) {
+    // Don't start playing the audio until the AudioContext stuff is connected
+    // and running.
+    audio.play();
+  }
+  ++iterationCount;
+
+  var buf = e.inputBuffer.getChannelData(0);
+  var nonzeroSamplesThisBuffer = 0;
+  for (var i = 0; i < buf.length; ++i) {
+    if (buf[i] != 0) {
+      ++nonzeroSamplesThisBuffer;
+    }
+  }
+  is(nonzeroSamplesThisBuffer, 0,
+     "Checking all samples are zero");
+  if (iterationCount >= 20) {
+    SimpleTest.finish();
+    complete = true;
+  }
+}
+
+audio.oncanplaythrough = function() {
+  sp.onaudioprocess = processSamples;
+};
+</script>
+</pre>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/test/test_mediaStreamAudioSourceNodeResampling.html
@@ -0,0 +1,69 @@
+<!DOCTYPE HTML>
+<html>
+<meta charset="utf-8">
+<head>
+  <title>Test MediaStreamAudioSourceNode processing is correct</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+SimpleTest.waitForExplicitFinish();
+
+var audio = new Audio("small-shot.ogg");
+var context = new AudioContext();
+var node = context.createMediaStreamSource(audio.mozCaptureStreamUntilEnded());
+var sp = context.createScriptProcessor(2048, 1);
+node.connect(sp);
+var expectedMinNonzeroSampleCount;
+var expectedMaxNonzeroSampleCount;
+var nonzeroSampleCount = 0;
+var complete = false;
+var iterationCount = 0;
+
+// This test ensures we receive at least expectedSampleCount nonzero samples
+function processSamples(e) {
+  if (complete) {
+    return;
+  }
+
+  if (iterationCount == 0) {
+    // Don't start playing the audio until the AudioContext stuff is connected
+    // and running.
+    audio.play();
+  }
+  ++iterationCount;
+
+  var buf = e.inputBuffer.getChannelData(0);
+  var nonzeroSamplesThisBuffer = 0;
+  for (var i = 0; i < buf.length; ++i) {
+    if (buf[i] != 0) {
+      ++nonzeroSamplesThisBuffer;
+    }
+  }
+  nonzeroSampleCount += nonzeroSamplesThisBuffer;
+  is(e.inputBuffer.numberOfChannels, 1,
+     "Checking data channel count (nonzeroSamplesThisBuffer=" +
+     nonzeroSamplesThisBuffer + ")");
+  ok(nonzeroSampleCount <= expectedMaxNonzeroSampleCount,
+     "Too many nonzero samples (got " + nonzeroSampleCount + ", expected max " + expectedMaxNonzeroSampleCount + ")");
+  if (nonzeroSampleCount >= expectedMinNonzeroSampleCount &&
+      nonzeroSamplesThisBuffer == 0) {
+    ok(true,
+     "Check received enough nonzero samples (got " + nonzeroSampleCount + ", expected min " + expectedMinNonzeroSampleCount + ")");
+    SimpleTest.finish();
+    complete = true;
+  }
+}
+
+audio.oncanplaythrough = function() {
+  // Use a fuzz factor of 100 to account for samples that just happen to be zero
+  expectedMinNonzeroSampleCount = Math.floor(audio.duration*context.sampleRate) - 100;
+  expectedMaxNonzeroSampleCount = Math.floor(audio.duration*context.sampleRate) + 500;
+  sp.onaudioprocess = processSamples;
+};
+</script>
+</pre>
+</body>
+</html>
--- a/content/media/webaudio/test/webaudio.js
+++ b/content/media/webaudio/test/webaudio.js
@@ -51,17 +51,17 @@ function compareBuffers(buf1, buf2,
       if (firstBadIndex == -1) {
         firstBadIndex = i;
       }
     }
   };
 
   is(difference, 0, "Found " + difference + " different samples, maxDifference: " +
      maxDifference + ", first bad index: " + firstBadIndex +
-     " with source offset " + sourceOffset + " and desitnation offset " +
+     " with source offset " + sourceOffset + " and destination offset " +
      destOffset);
 }
 
 function getEmptyBuffer(context, length) {
   return context.createBuffer(gTest.numberOfChannels, length, context.sampleRate);
 }
 
 /**
@@ -82,25 +82,27 @@ function getEmptyBuffer(context, length)
  *                     or createGraph must be provided.
  * + createExpectedBuffers: optional method which takes a context object and
  *                          returns either one expected buffer or an array of
  *                          them, designating what is expected to be observed
  *                          in the output.  If omitted, the output is expected
  *                          to be silence.  The sum of the length of the expected
  *                          buffers should be equal to gTest.length.  This
  *                          function is guaranteed to be called before createGraph.
+ * + skipOfflineContextTests: optional. when true, skips running tests on an offline
+ *                            context by circumventing testOnOfflineContext.
  */
 function runTest()
 {
   function done() {
     SimpleTest.finish();
   }
 
   SimpleTest.waitForExplicitFinish();
-  addLoadEvent(function() {
+  function runTestFunction () {
     if (!gTest.numberOfChannels) {
       gTest.numberOfChannels = 2; // default
     }
 
     var testLength;
 
     function runTestOnContext(context, callback, testOutput) {
       if (!gTest.createExpectedBuffers) {
@@ -172,19 +174,30 @@ function runTest()
                              true);
             }
             samplesSeen += expectedBuffer.length;
           }
           callback();
         };
         context.startRendering();
       }
+
       var context = new OfflineAudioContext(gTest.numberOfChannels, testLength, sampleRate);
       runTestOnContext(context, callback, testOutput);
     }
 
     testOnNormalContext(function() {
-      testOnOfflineContext(function() {
-        testOnOfflineContext(done, 44100);
-      }, 48000);
+      if (!gTest.skipOfflineContextTests) {
+        testOnOfflineContext(function() {
+          testOnOfflineContext(done, 44100);
+        }, 48000);
+      } else {
+        done();
+      }
     });
-  });
+  };
+
+  if (document.readyState !== 'complete') {
+    addLoadEvent(runTestFunction);
+  } else {
+    runTestFunction();
+  }
 }
--- a/dom/webidl/AudioContext.webidl
+++ b/dom/webidl/AudioContext.webidl
@@ -37,16 +37,18 @@ interface AudioContext : EventTarget {
 
     [Creator, Throws]
     ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
                                               optional unsigned long numberOfInputChannels = 2,
                                               optional unsigned long numberOfOutputChannels = 2);
 
     [Creator]
     AnalyserNode createAnalyser();
+    [Creator, Throws]
+    MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
     [Creator]
     GainNode createGain();
     [Creator, Throws]
     DelayNode createDelay(optional double maxDelayTime = 1);
     [Creator]
     BiquadFilterNode createBiquadFilter();
     [Creator]
     WaveShaperNode createWaveShaper();
new file mode 100644
--- /dev/null
+++ b/dom/webidl/MediaStreamAudioSourceNode.webidl
@@ -0,0 +1,16 @@
+/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * The origin of this IDL file is
+ * https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html
+ *
+ * Copyright © 2012 W3C® (MIT, ERCIM, Keio), All Rights Reserved. W3C
+ * liability, trademark and document use rules apply.
+ */
+
+interface MediaStreamAudioSourceNode : AudioNode {
+
+};
+
--- a/dom/webidl/WebIDL.mk
+++ b/dom/webidl/WebIDL.mk
@@ -183,16 +183,17 @@ webidl_files = \
   LinkStyle.webidl \
   LocalMediaStream.webidl \
   Location.webidl \
   MediaError.webidl \
   MediaRecorder.webidl \
   MediaSource.webidl \
   MediaStream.webidl \
   MediaStreamAudioDestinationNode.webidl \
+  MediaStreamAudioSourceNode.webidl \
   MediaStreamEvent.webidl \
   MediaStreamTrack.webidl \
   MessageEvent.webidl \
   MimeType.webidl \
   MimeTypeArray.webidl \
   MobileMessageManager.webidl \
   MouseEvent.webidl \
   MouseScrollEvent.webidl \