Bug 869224 - Use the same algorithm to compute the buffer start/end time and to convert AudioParam time values to ticks; r=roc
author Ehsan Akhgari <ehsan@mozilla.com>
Tue, 07 May 2013 23:31:15 -0400
changeset 142166 f5669ac3bf3663459ed7688dc45291d7482e3a3e
parent 142165 ade6bafb19e464ff4ccc52b03bb17f6f5b43c306
child 142167 d8b87a58ebf51a6348a3f1c23eeef97621d72c96
push id 2579
push user akeybl@mozilla.com
push date Mon, 24 Jun 2013 18:52:47 +0000
treeherder mozilla-beta@b69b7de8a05a
reviewers roc
bugs 869224
milestone 23.0a1
content/media/AudioNodeStream.cpp
content/media/webaudio/WebAudioUtils.cpp
content/media/webaudio/WebAudioUtils.h
content/media/webaudio/test/Makefile.in
content/media/webaudio/test/test_audioParamTimelineDestinationOffset.html
--- a/content/media/AudioNodeStream.cpp
+++ b/content/media/AudioNodeStream.cpp
@@ -48,20 +48,19 @@ AudioNodeStream::SetStreamTimeParameter(
   MOZ_ASSERT(this);
   GraphImpl()->AppendMessage(new Message(this, aIndex, aRelativeToStream, aStreamTime));
 }
 
 void
 AudioNodeStream::SetStreamTimeParameterImpl(uint32_t aIndex, MediaStream* aRelativeToStream,
                                             double aStreamTime)
 {
-  StreamTime streamTime = std::max<MediaTime>(0, SecondsToMediaTime(aStreamTime));
-  GraphTime graphTime = aRelativeToStream->StreamTimeToGraphTime(streamTime);
-  StreamTime thisStreamTime = GraphTimeToStreamTimeOptimistic(graphTime);
-  TrackTicks ticks = TimeToTicksRoundUp(IdealAudioRate(), thisStreamTime);
+  TrackTicks ticks =
+      WebAudioUtils::ConvertDestinationStreamTimeToSourceStreamTime(
+          aStreamTime, this, aRelativeToStream);
   mEngine->SetStreamTimeParameter(aIndex, ticks);
 }
 
 void
 AudioNodeStream::SetDoubleParameter(uint32_t aIndex, double aValue)
 {
   class Message : public ControlMessage {
   public:
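
(The four removed lines above are not dropped; they move, essentially verbatim, into a shared WebAudioUtils helper shown in the next hunk, so the buffer start/end path and the AudioParam path run the exact same conversion.) The conversion walks a single time value across three clocks: destination stream time, graph time, source stream time, and finally whole ticks. A minimal standalone sketch of that chain, with stand-in types and an assumed fixed-point unit and tick rate in place of Gecko's real MediaTime and IdealAudioRate():

// Standalone sketch, not Gecko code.  FRAC_BITS and RATE are assumptions
// standing in for MediaTime's fixed-point unit and IdealAudioRate().
#include <algorithm>
#include <cstdint>

typedef int64_t MediaTime;
typedef int64_t TrackTicks;
static const int FRAC_BITS = 14;      // assumed fixed-point scale
static const int64_t RATE = 48000;    // assumed ideal audio rate

static MediaTime SecondsToMediaTime(double aSeconds) {
  return static_cast<MediaTime>(aSeconds * (1 << FRAC_BITS));
}
static TrackTicks TimeToTicksRoundUp(int64_t aRate, MediaTime aTime) {
  // ceil(aTime * aRate / 2^FRAC_BITS), for non-negative aTime
  return (aTime * aRate + (1 << FRAC_BITS) - 1) >> FRAC_BITS;
}

// Model each stream's clock as a fixed offset from the shared graph clock.
struct Stream {
  MediaTime mGraphOffset;
  MediaTime StreamTimeToGraphTime(MediaTime aTime) const { return aTime + mGraphOffset; }
  MediaTime GraphTimeToStreamTime(MediaTime aTime) const { return aTime - mGraphOffset; }
};

TrackTicks ConvertDestinationStreamTimeToSourceStreamTime(double aTime,
                                                          const Stream& aSource,
                                                          const Stream& aDestination)
{
  MediaTime streamTime = std::max<MediaTime>(0, SecondsToMediaTime(aTime));
  MediaTime graphTime = aDestination.StreamTimeToGraphTime(streamTime);
  MediaTime sourceTime = aSource.GraphTimeToStreamTime(graphTime);
  return TimeToTicksRoundUp(RATE, sourceTime);
}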
--- a/content/media/webaudio/WebAudioUtils.cpp
+++ b/content/media/webaudio/WebAudioUtils.cpp
@@ -13,32 +13,39 @@ namespace dom {
 
 struct ConvertTimeToTickHelper
 {
   AudioNodeStream* mSourceStream;
   AudioNodeStream* mDestinationStream;
 
   static int64_t Convert(double aTime, void* aClosure)
   {
-    TrackRate sampleRate = IdealAudioRate();
-    StreamTime streamTime;
-
     ConvertTimeToTickHelper* This = static_cast<ConvertTimeToTickHelper*> (aClosure);
     if (This->mSourceStream) {
-      TrackTicks tick = This->mDestinationStream->GetCurrentPosition();
-      StreamTime destinationStreamTime = TicksToTimeRoundDown(sampleRate, tick);
-      GraphTime graphTime = This->mDestinationStream->StreamTimeToGraphTime(destinationStreamTime);
-      streamTime = This->mSourceStream->GraphTimeToStreamTime(graphTime);
+      return WebAudioUtils::ConvertDestinationStreamTimeToSourceStreamTime(
+          aTime, This->mSourceStream, This->mDestinationStream);
     } else {
-      streamTime = This->mDestinationStream->GetCurrentPosition();
+      StreamTime streamTime = This->mDestinationStream->GetCurrentPosition();
+      return TimeToTicksRoundUp(IdealAudioRate(), streamTime + SecondsToMediaTime(aTime));
     }
-    return TimeToTicksRoundDown(sampleRate, streamTime + SecondsToMediaTime(aTime));
   }
 };
 
+TrackTicks
+WebAudioUtils::ConvertDestinationStreamTimeToSourceStreamTime(double aTime,
+                                                              MediaStream* aSource,
+                                                              MediaStream* aDestination)
+{
+  StreamTime streamTime = std::max<MediaTime>(0, SecondsToMediaTime(aTime));
+  GraphTime graphTime = aDestination->StreamTimeToGraphTime(streamTime);
+  StreamTime thisStreamTime = aSource->GraphTimeToStreamTimeOptimistic(graphTime);
+  TrackTicks ticks = TimeToTicksRoundUp(IdealAudioRate(), thisStreamTime);
+  return ticks;
+}
+
 double
 WebAudioUtils::StreamPositionToDestinationTime(TrackTicks aSourcePosition,
                                                AudioNodeStream* aSource,
                                                AudioNodeStream* aDestination)
 {
   StreamTime sourceTime = TicksToTimeRoundDown(IdealAudioRate(), aSourcePosition);
   GraphTime graphTime = aSource->StreamTimeToGraphTime(sourceTime);
   StreamTime destinationTime = aDestination->GraphTimeToStreamTimeOptimistic(graphTime);
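
Note the rounding change in this hunk: both branches previously funneled into the removed TimeToTicksRoundDown call at the bottom, while the buffer start/end path in AudioNodeStream::SetStreamTimeParameterImpl rounded up, so a buffer boundary and an AudioParam event scheduled at the same instant could land one tick apart. A standalone illustration of that off-by-one, assuming a 48000 Hz tick rate:

// Standalone illustration (not Gecko code) of how the two rounding modes
// disagree by one tick for any time strictly between tick boundaries.
#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
  const double rate = 48000.0;            // assumed tick rate
  const double seconds = 1000.5 / rate;   // between ticks 1000 and 1001
  int64_t up = static_cast<int64_t>(std::ceil(seconds * rate));     // 1001
  int64_t down = static_cast<int64_t>(std::floor(seconds * rate));  // 1000
  std::printf("round up: %lld, round down: %lld\n",
              static_cast<long long>(up), static_cast<long long>(down));
  return 0;
}

With every conversion now going through TimeToTicksRoundUp, either directly or via the shared helper, both paths map the same instant to the same tick.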
--- a/content/media/webaudio/WebAudioUtils.h
+++ b/content/media/webaudio/WebAudioUtils.h
@@ -37,16 +37,25 @@ struct WebAudioUtils {
    * over aDuration seconds.
    */
   static double ComputeSmoothingRate(double aDuration, double aSampleRate)
   {
     return 1.0 - std::exp(-1.0 / (aDuration * aSampleRate));
   }
 
   /**
+   * Convert a time in seconds relative to the destination stream to
+   * TrackTicks relative to the source stream.
+   */
+  static TrackTicks
+  ConvertDestinationStreamTimeToSourceStreamTime(double aTime,
+                                                 MediaStream* aSource,
+                                                 MediaStream* aDestination);
+
+  /**
    * Converts AudioParamTimeline floating point time values to tick values
    * with respect to a source and a destination AudioNodeStream.
    *
    * This needs to be called for each AudioParamTimeline that gets sent to an
    * AudioNodeEngine on the engine side where the AudioParamTimeline is
    * received.  This means that such engines need to be aware of their source
    * and destination streams as well.
    */
--- a/content/media/webaudio/test/Makefile.in
+++ b/content/media/webaudio/test/Makefile.in
@@ -26,16 +26,17 @@ MOCHITEST_FILES := \
   test_AudioBuffer.html \
   test_AudioContext.html \
   test_AudioListener.html \
   test_AudioParam.html \
   test_audioParamExponentialRamp.html \
   test_audioParamLinearRamp.html \
   test_audioParamSetCurveAtTime.html \
   test_audioParamSetTargetAtTime.html \
+  test_audioParamTimelineDestinationOffset.html \
   test_audioBufferSourceNode.html \
   test_audioBufferSourceNodeLazyLoopParam.html \
   test_audioBufferSourceNodeLoop.html \
   test_audioBufferSourceNodeLoopStartEnd.html \
   test_audioBufferSourceNodeLoopStartEndSame.html \
   test_audioBufferSourceNodeNullBuffer.html \
   test_badConnect.html \
   test_biquadFilterNode.html \
new file mode 100644
--- /dev/null
+++ b/content/media/webaudio/test/test_audioParamTimelineDestinationOffset.html
@@ -0,0 +1,59 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>Test AudioParam timeline events scheduled after the destination stream has started playback</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <script type="text/javascript" src="webaudio.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+addLoadEvent(function() {
+  SpecialPowers.setBoolPref("media.webaudio.enabled", true);
+
+  var context = new AudioContext();
+
+  var sourceBuffer = context.createBuffer(1, 2048, context.sampleRate);
+  for (var i = 0; i < 2048; ++i) {
+    sourceBuffer.getChannelData(0)[i] = 1;
+  }
+  var emptyBuffer = context.createBuffer(1, 16384, context.sampleRate);
+
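+  // Wait before scheduling anything, so that context.currentTime has
+  // advanced past zero (i.e. the destination stream has started playback).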
+  setTimeout(function() {
+    var source = context.createBufferSource();
+    source.buffer = sourceBuffer;
+    source.start(context.currentTime);
+    source.stop(context.currentTime + sourceBuffer.duration);
+
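+    // The gain is held at 0 for the entire test, so the ScriptProcessor
+    // below should only ever observe silence.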
+    var gain = context.createGain();
+    gain.gain.setValueAtTime(0, context.currentTime);
+    gain.gain.setTargetAtTime(0, context.currentTime + sourceBuffer.duration, 1);
+    source.connect(gain);
+
+    var sp = context.createScriptProcessor(16384, 1);
+    gain.connect(sp);
+    sp.connect(context.destination);
+
+    sp.onaudioprocess = function(e) {
+      is(e.inputBuffer.numberOfChannels, 1, "Correct input channel count");
+      compareBuffers(e.inputBuffer.getChannelData(0), emptyBuffer.getChannelData(0));
+
+      sp.onaudioprocess = null;
+
+      SpecialPowers.clearUserPref("media.webaudio.enabled");
+      SimpleTest.finish();
+    };
+  }, 100);
+});
+
+</script>
+</pre>
+</body>
+</html>