Bug 1308433 - In automation methods, when startTime is < AudioContext.currentTime, clamp to AudioContext.currentTime; r=padenot
author: Dan Minor <dminor@mozilla.com>
date: Tue, 08 Nov 2016 09:29:35 -0500
changeset: 322972 b63a68fbfd2eafeef6c68e974f5382fe73681b6a
parent: 322971 ba20391edbec448b56725ef01158176614706b4f
child: 322973 b54fc5d9f63587c33976569efd34013fcc89969f
push id: 30966
push user: philringnalda@gmail.com
push date: Fri, 18 Nov 2016 03:14:32 +0000
treeherder: mozilla-central@741cfaed340d
reviewers: padenot
bugs: 1308433
milestone: 53.0a1
Bug 1308433 - In automation methods, when startTime is < AudioContext.currentTime, clamp to AudioContext.currentTime; r=padenot MozReview-Commit-ID: ImnxgOiIdnG
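For context, a minimal sketch of the behaviour this change gives to web content (hypothetical illustration, not part of the patch; it assumes an AudioContext whose currentTime has already advanced to roughly 1.0):

  // Hypothetical illustration only; the variable names are not from the patch.
  var ctx = new AudioContext();
  var gain = ctx.createGain();
  // A startTime earlier than ctx.currentTime (here 0.25 < ~1.0) ...
  gain.gain.setValueAtTime(0.5, 0.25);
  // ... is now clamped to the current time, so it behaves the same as:
  gain.gain.setValueAtTime(0.5, ctx.currentTime);
  // Before this change the event kept its original, already-elapsed time,
  // so a later ramp such as linearRampToValueAtTime could be computed from
  // a point the context had never actually rendered.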
dom/media/webaudio/AudioParam.h
testing/web-platform/meta/MANIFEST.json
testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-exponentialRampToValueAtTime.html
testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-linearRampToValueAtTime.html
testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setTargetAtTime.html
testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueAtTime.html
testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueCurveAtTime.html
--- a/dom/media/webaudio/AudioParam.h
+++ b/dom/media/webaudio/AudioParam.h
@@ -49,16 +49,17 @@ public:
                                   ErrorResult& aRv)
   {
     if (!WebAudioUtils::IsTimeValid(aStartTime)) {
       aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
       return this;
     }
     aValues.ComputeLengthAndData();
 
+    aStartTime = std::max(aStartTime, GetParentObject()->CurrentTime());
     EventInsertionHelper(aRv, AudioTimelineEvent::SetValueCurve,
                          aStartTime, 0.0f, 0.0f, aDuration, aValues.Data(),
                          aValues.Length());
     return this;
   }
 
   void SetValue(float aValue)
   {
@@ -77,67 +78,73 @@ public:
   }
 
   AudioParam* SetValueAtTime(float aValue, double aStartTime, ErrorResult& aRv)
   {
     if (!WebAudioUtils::IsTimeValid(aStartTime)) {
       aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
       return this;
     }
+    aStartTime = std::max(aStartTime, GetParentObject()->CurrentTime());
     EventInsertionHelper(aRv, AudioTimelineEvent::SetValueAtTime,
                          aStartTime, aValue);
 
     return this;
   }
 
   AudioParam* LinearRampToValueAtTime(float aValue, double aEndTime,
                                       ErrorResult& aRv)
   {
     if (!WebAudioUtils::IsTimeValid(aEndTime)) {
       aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
       return this;
     }
+    aEndTime = std::max(aEndTime, GetParentObject()->CurrentTime());
     EventInsertionHelper(aRv, AudioTimelineEvent::LinearRamp, aEndTime, aValue);
     return this;
   }
 
   AudioParam* ExponentialRampToValueAtTime(float aValue, double aEndTime,
                                            ErrorResult& aRv)
   {
     if (!WebAudioUtils::IsTimeValid(aEndTime)) {
       aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
       return this;
     }
+    aEndTime = std::max(aEndTime, GetParentObject()->CurrentTime());
     EventInsertionHelper(aRv, AudioTimelineEvent::ExponentialRamp,
                          aEndTime, aValue);
     return this;
   }
 
   AudioParam* SetTargetAtTime(float aTarget, double aStartTime,
                               double aTimeConstant, ErrorResult& aRv)
   {
     if (!WebAudioUtils::IsTimeValid(aStartTime) ||
         !WebAudioUtils::IsTimeValid(aTimeConstant)) {
       aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
       return this;
     }
+    aStartTime = std::max(aStartTime, GetParentObject()->CurrentTime());
     EventInsertionHelper(aRv, AudioTimelineEvent::SetTarget,
                          aStartTime, aTarget,
                          aTimeConstant);
 
     return this;
   }
 
   AudioParam* CancelScheduledValues(double aStartTime, ErrorResult& aRv)
   {
     if (!WebAudioUtils::IsTimeValid(aStartTime)) {
       aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
       return this;
     }
 
+    aStartTime = std::max(aStartTime, GetParentObject()->CurrentTime());
+
     // Remove some events on the main thread copy.
     AudioEventTimeline::CancelScheduledValues(aStartTime);
 
     AudioTimelineEvent event(AudioTimelineEvent::Cancel, aStartTime, 0.0f);
 
     SendEventToEngine(event);
 
     return this;
--- a/testing/web-platform/meta/MANIFEST.json
+++ b/testing/web-platform/meta/MANIFEST.json
@@ -38752,16 +38752,40 @@
           }
         ],
         "web-animations/interfaces/KeyframeEffectReadOnly/copy-contructor.html": [
           {
             "path": "web-animations/interfaces/KeyframeEffectReadOnly/copy-contructor.html",
             "url": "/web-animations/interfaces/KeyframeEffectReadOnly/copy-contructor.html"
           }
         ],
+        "webaudio/the-audio-api/the-audioparam-interface/retrospective-exponentialRampToValueAtTime.html": [
+          {
+            "path": "webaudio/the-audio-api/the-audioparam-interface/retrospective-exponentialRampToValueAtTime.html",
+            "url": "/webaudio/the-audio-api/the-audioparam-interface/retrospective-exponentialRampToValueAtTime.html"
+          }
+        ],
+        "webaudio/the-audio-api/the-audioparam-interface/retrospective-linearRampToValueAtTime.html": [
+          {
+            "path": "webaudio/the-audio-api/the-audioparam-interface/retrospective-linearRampToValueAtTime.html",
+            "url": "/webaudio/the-audio-api/the-audioparam-interface/retrospective-linearRampToValueAtTime.html"
+          }
+        ],
+        "webaudio/the-audio-api/the-audioparam-interface/retrospective-setTargetAtTime.html": [
+          {
+            "path": "webaudio/the-audio-api/the-audioparam-interface/retrospective-setTargetAtTime.html",
+            "url": "/webaudio/the-audio-api/the-audioparam-interface/retrospective-setTargetAtTime.html"
+          }
+        ],
+        "webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueCurveAtTime.html": [
+          {
+            "path": "webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueCurveAtTime.html",
+            "url": "/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueCurveAtTime.html"
+          }
+        ],
         "webaudio/the-audio-api/the-constantsourcenode-interface/test-constantsourcenode.html": [
           {
             "path": "webaudio/the-audio-api/the-constantsourcenode-interface/test-constantsourcenode.html",
             "url": "/webaudio/the-audio-api/the-constantsourcenode-interface/test-constantsourcenode.html"
           }
         ]
       }
     },
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-exponentialRampToValueAtTime.html
@@ -0,0 +1,51 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>Test exponentialRampToValue with end time in the past</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+function do_test(t, context) {
+  var source = context.createConstantSource();
+  source.start();
+
+  var test = context.createGain();
+  test.gain.exponentialRampToValueAtTime(0.1, 0.5*context.currentTime);
+  test.gain.exponentialRampToValueAtTime(0.9, 2.0);
+
+  var reference = context.createGain();
+  reference.gain.exponentialRampToValueAtTime(0.1, context.currentTime);
+  reference.gain.exponentialRampToValueAtTime(0.9, 2.0);
+
+  source.connect(test);
+  source.connect(reference);
+
+  var merger = context.createChannelMerger();
+  test.connect(merger, 0, 0);
+  reference.connect(merger, 0, 1);
+
+  var processor = context.createScriptProcessor(0, 2, 0);
+  merger.connect(processor);
+  processor.onaudioprocess =
+    t.step_func_done((e) => {
+      source.stop();
+      processor.onaudioprocess = null;
+
+      var testValue = e.inputBuffer.getChannelData(0)[0];
+      var referenceValue = e.inputBuffer.getChannelData(1)[0];
+
+      assert_equals(testValue, referenceValue,
+                        "value matches expected");
+    });
+}
+
+async_test(function(t) {
+  var context = new AudioContext;
+  (function waitForTimeAdvance() {
+    if (context.currentTime == 0) {
+      t.step_timeout(waitForTimeAdvance, 0);
+    } else {
+      do_test(t, context);
+    }
+  })();
+});
+</script>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-linearRampToValueAtTime.html
@@ -0,0 +1,51 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>Test linearRampToValue with end time in the past</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+function do_test(t, context) {
+  var source = context.createConstantSource();
+  source.start();
+
+  var test = context.createGain();
+  test.gain.linearRampToValueAtTime(0.1, 0.5*context.currentTime);
+  test.gain.linearRampToValueAtTime(0.9, 2.0);
+
+  var reference = context.createGain();
+  reference.gain.linearRampToValueAtTime(0.1, context.currentTime);
+  reference.gain.linearRampToValueAtTime(0.9, 2.0);
+
+  source.connect(test);
+  source.connect(reference);
+
+  var merger = context.createChannelMerger();
+  test.connect(merger, 0, 0);
+  reference.connect(merger, 0, 1);
+
+  var processor = context.createScriptProcessor(0, 2, 0);
+  merger.connect(processor);
+  processor.onaudioprocess =
+    t.step_func_done((e) => {
+      source.stop();
+      processor.onaudioprocess = null;
+
+      var testValue = e.inputBuffer.getChannelData(0)[0];
+      var referenceValue = e.inputBuffer.getChannelData(1)[0];
+
+      assert_equals(testValue, referenceValue,
+                        "value matches expected");
+    });
+}
+
+async_test(function(t) {
+  var context = new AudioContext;
+  (function waitForTimeAdvance() {
+    if (context.currentTime == 0) {
+      t.step_timeout(waitForTimeAdvance, 0);
+    } else {
+      do_test(t, context);
+    }
+  })();
+});
+</script>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setTargetAtTime.html
@@ -0,0 +1,51 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>Test setTargetAtTime with start time in the past</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+function do_test(t, context) {
+  var source = context.createConstantSource();
+  source.start();
+
+  var test = context.createGain();
+  test.gain.setTargetAtTime(0.1, 0.5*context.currentTime, 0.1);
+  test.gain.linearRampToValueAtTime(0.9, 2.0);
+
+  var reference = context.createGain();
+  reference.gain.setTargetAtTime(0.1, context.currentTime, 0.1);
+  reference.gain.linearRampToValueAtTime(0.9, 2.0);
+
+  source.connect(test);
+  source.connect(reference);
+
+  var merger = context.createChannelMerger();
+  test.connect(merger, 0, 0);
+  reference.connect(merger, 0, 1);
+
+  var processor = context.createScriptProcessor(0, 2, 0);
+  merger.connect(processor);
+  processor.onaudioprocess =
+    t.step_func_done((e) => {
+      source.stop();
+      processor.onaudioprocess = null;
+
+      var testValue = e.inputBuffer.getChannelData(0)[0];
+      var referenceValue = e.inputBuffer.getChannelData(1)[0];
+
+      assert_equals(testValue, referenceValue,
+                        "value matches expected");
+    });
+}
+
+async_test(function(t) {
+  var context = new AudioContext;
+  (function waitForTimeAdvance() {
+    if (context.currentTime == 0) {
+      t.step_timeout(waitForTimeAdvance, 0);
+    } else {
+      do_test(t, context);
+    }
+  })();
+});
+</script>
--- a/testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueAtTime.html
+++ b/testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueAtTime.html
@@ -1,40 +1,27 @@
 <!DOCTYPE html>
 <title>Test setValueAtTime with startTime in the past</title>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 <script>
 function do_test(t, context) {
-  var source = context.createBufferSource();
-  source.buffer =
-    function() {
-      var buffer = context.createBuffer(1, 1, context.sampleRate);
-      buffer.getChannelData(0)[0] = 1.0;
-      return buffer;
-    }();
-  source.loop = true;
+  var source = context.createConstantSource();
   source.start();
 
   // Use a ramp of slope 1/sample to measure time.
   // The end value is the extent of exact precision in single precision float.
   const rampEnd = Math.pow(2, 24);
   const rampEndSeconds = rampEnd / context.sampleRate;
   var test = context.createGain();
-  test.gain.setValueAtTime(0.0, 0.0);
+  test.gain.setValueAtTime(0.0, 0.5*context.currentTime);
   test.gain.linearRampToValueAtTime(rampEnd, rampEndSeconds);
 
-  // With a different starting point on the same line, the result should be
-  // the same.  |currentTime| may include double precision floating point
-  // rounding errors, so round to nearest integer sample to ignore these.
-  var scheduledSample = Math.round(context.currentTime * context.sampleRate);
-  assert_equals(scheduledSample % 128, 0,
-                "currentTime advances in blocks of 128 samples");
   var reference = context.createGain();
-  reference.gain.setValueAtTime(scheduledSample, context.currentTime);
+  reference.gain.setValueAtTime(0.0, context.currentTime);
   reference.gain.linearRampToValueAtTime(rampEnd, rampEndSeconds);
 
   source.connect(test);
   source.connect(reference);
 
   var merger = context.createChannelMerger();
   test.connect(merger, 0, 0);
   reference.connect(merger, 0, 1);
@@ -45,21 +32,17 @@ function do_test(t, context) {
     t.step_func_done((e) => {
       source.stop();
       processor.onaudioprocess = null;
 
       var testValue = e.inputBuffer.getChannelData(0)[0];
       var referenceValue = e.inputBuffer.getChannelData(1)[0];
 
       assert_equals(testValue, referenceValue,
-                    "ramp value matches expected");
-      assert_greater_than_equal(testValue, scheduledSample,
-                                "time does not retreat");
-      assert_equals(testValue % 128, 0,
-                    "ScriptProcessor blocks align on 128-sample blocks");
+                        "ramp value matches expected");
     });
 }
 
 async_test(function(t) {
   var context = new AudioContext;
   (function waitForTimeAdvance() {
     if (context.currentTime == 0) {
       t.step_timeout(waitForTimeAdvance, 0);
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webaudio/the-audio-api/the-audioparam-interface/retrospective-setValueCurveAtTime.html
@@ -0,0 +1,49 @@
+<!doctype html>
+<meta charset=utf-8>
+<title>Test SetValueCurve with start time in the past</title>
+<script src=/resources/testharness.js></script>
+<script src=/resources/testharnessreport.js></script>
+<script>
+function do_test(t, context) {
+  var source = context.createConstantSource();
+  source.start();
+
+  var test = context.createGain();
+  test.gain.setValueCurveAtTime(new Float32Array([1.0, 0.1]), 0.0, 1.0);
+
+  var reference = context.createGain();
+  reference.gain.setValueCurveAtTime(new Float32Array([1.0, 0.1]), 0.5*context.currentTime, 1.0);
+
+  source.connect(test);
+  source.connect(reference);
+
+  var merger = context.createChannelMerger();
+  test.connect(merger, 0, 0);
+  reference.connect(merger, 0, 1);
+
+  var processor = context.createScriptProcessor(0, 2, 0);
+  merger.connect(processor);
+  processor.onaudioprocess =
+    t.step_func_done((e) => {
+      source.stop();
+      processor.onaudioprocess = null;
+
+      var testValue = e.inputBuffer.getChannelData(0)[0];
+      var referenceValue = e.inputBuffer.getChannelData(1)[0];
+
+      assert_equals(testValue, referenceValue,
+                        "value matches expected");
+    });
+}
+
+async_test(function(t) {
+  var context = new AudioContext;
+  (function waitForTimeAdvance() {
+    if (context.currentTime == 0) {
+      t.step_timeout(waitForTimeAdvance, 0);
+    } else {
+      do_test(t, context);
+    }
+  })();
+});
+</script>
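The five retrospective-*.html tests added or updated above share one comparison structure: a gain node is automated with a time in the past, a second reference gain node is automated so that, once past times are clamped, both branches must render the same output, and the first sample of each is compared in a ScriptProcessor callback. A condensed sketch of that shared pattern follows; the helper name and its parameterisation are hypothetical, since the real tests inline the equivalent code inside the async_test/waitForTimeAdvance wrapper shown above.

  // Hypothetical consolidation of the pattern used by the new tests.
  function compareTestAndReference(t, context, scheduleTest, scheduleReference) {
    var source = context.createConstantSource();
    source.start();

    var test = context.createGain();
    scheduleTest(test.gain);           // automation call under test (time in the past)

    var reference = context.createGain();
    scheduleReference(reference.gain); // reference scheduling to compare against

    source.connect(test);
    source.connect(reference);

    var merger = context.createChannelMerger();
    test.connect(merger, 0, 0);
    reference.connect(merger, 0, 1);

    var processor = context.createScriptProcessor(0, 2, 0);
    merger.connect(processor);
    processor.onaudioprocess = t.step_func_done((e) => {
      source.stop();
      processor.onaudioprocess = null;
      // With the clamping added in AudioParam.h, the first rendered sample
      // of the test branch must match the reference branch exactly.
      assert_equals(e.inputBuffer.getChannelData(0)[0],
                    e.inputBuffer.getChannelData(1)[0],
                    "value matches expected");
    });
  }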