Bug 1056458 - Add backend for recording automation events in the web audio API for developer tools. r=vp
author     Jordan Santell <jsantell@gmail.com>
date       Sat, 27 Dec 2014 09:10:00 +0100
changeset  221460 6bf93610034bb88c0ac64dc56948e557f26f005d
parent     221459 45e2731926803cf9020bbc46ddec11e3ce103b6b
child      221461 199930d6b2f7f3804d34552efc6a9083807c7332
child      221553 bb24f0be1bcbc8cb04aa0bd4cabd003445a46e13
push id    53352
push user  cbook@mozilla.com
push date  Mon, 29 Dec 2014 15:17:49 +0000
reviewers  vp
bugs       1056458
milestone  37.0a1
browser/devtools/webaudioeditor/test/browser.ini
browser/devtools/webaudioeditor/test/browser_audionode-actor-add-automation-event.js
browser/devtools/webaudioeditor/test/browser_audionode-actor-get-automation-data-01.js
browser/devtools/webaudioeditor/test/browser_audionode-actor-get-automation-data-02.js
browser/devtools/webaudioeditor/test/browser_webaudio-actor-automation-event.js
browser/devtools/webaudioeditor/test/doc_automation.html
browser/devtools/webaudioeditor/test/head.js
toolkit/devtools/server/actors/webaudio.js
--- a/browser/devtools/webaudioeditor/test/browser.ini
+++ b/browser/devtools/webaudioeditor/test/browser.ini
@@ -5,31 +5,36 @@ support-files =
   doc_complex-context.html
   doc_simple-node-creation.html
   doc_buffer-and-array.html
   doc_media-node-creation.html
   doc_destroy-nodes.html
   doc_connect-param.html
   doc_connect-multi-param.html
   doc_iframe-context.html
+  doc_automation.html
   440hz_sine.ogg
   head.js
 
 [browser_audionode-actor-get-param-flags.js]
 [browser_audionode-actor-get-params-01.js]
 [browser_audionode-actor-get-params-02.js]
 [browser_audionode-actor-get-set-param.js]
 [browser_audionode-actor-get-type.js]
 [browser_audionode-actor-is-source.js]
 [browser_audionode-actor-bypass.js]
 [browser_audionode-actor-connectnode-disconnect.js]
 [browser_audionode-actor-connectparam.js]
+[browser_audionode-actor-add-automation-event.js]
+[browser_audionode-actor-get-automation-data-01.js]
+[browser_audionode-actor-get-automation-data-02.js]
 [browser_webaudio-actor-simple.js]
 [browser_webaudio-actor-destroy-node.js]
 [browser_webaudio-actor-connect-param.js]
+[browser_webaudio-actor-automation-event.js]
 
 [browser_wa_destroy-node-01.js]
 
 [browser_wa_first-run.js]
 [browser_wa_reset-01.js]
 [browser_wa_reset-02.js]
 [browser_wa_reset-03.js]
 [browser_wa_reset-04.js]
new file mode 100644
--- /dev/null
+++ b/browser/devtools/webaudioeditor/test/browser_audionode-actor-add-automation-event.js
@@ -0,0 +1,52 @@
+/* Any copyright is dedicated to the Public Domain.
+   http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/**
+ * Test AudioNode#addAutomationEvent();
+ */
+
+add_task(function*() {
+  let { target, front } = yield initBackend(SIMPLE_CONTEXT_URL);
+  let [_, [destNode, oscNode, gainNode]] = yield Promise.all([
+    front.setup({ reload: true }),
+    get3(front, "create-node")
+  ]);
+  let count = 0;
+  let counter = () => count++;
+  front.on("automation-event", counter);
+
+  let t0 = 0, t1 = 0.1, t2 = 0.2, t3 = 0.3, t4 = 0.4, t5 = 0.6, t6 = 0.7, t7 = 1;
+  let curve = [-1, 0, 1];
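+  // Each call passes the AudioParam name, the automation method name,
+  // and that method's arguments as an array.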
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.2, t0]);
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.3, t1]);
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.4, t2]);
+  yield oscNode.addAutomationEvent("frequency", "linearRampToValueAtTime", [1, t3]);
+  yield oscNode.addAutomationEvent("frequency", "linearRampToValueAtTime", [0.15, t4]);
+  yield oscNode.addAutomationEvent("frequency", "exponentialRampToValueAtTime", [0.75, t5]);
+  yield oscNode.addAutomationEvent("frequency", "exponentialRampToValueAtTime", [0.5, t6]);
+  yield oscNode.addAutomationEvent("frequency", "setValueCurveAtTime", [curve, t7, t7 - t6]);
+  yield oscNode.addAutomationEvent("frequency", "setTargetAtTime", [20, 2, 5]);
+
+  ok(true, "successfully set automation events for valid automation events");
+
+  try {
+    yield oscNode.addAutomationEvent("frequency", "notAMethod", 20, 2, 5);
+    ok(false, "non-automation methods should not be successful");
+  } catch (e) {
+    ok(/invalid/.test(e.message), "AudioNode:addAutomationEvent fails for invalid automation methods");
+  }
+
+  try {
+    yield oscNode.addAutomationEvent("invalidparam", "setValueAtTime", 0.2, t0);
+    ok(false, "automating non-AudioParams should not be successful");
+  } catch (e) {
+    ok(/invalid/.test(e.message), "AudioNode:addAutomationEvent fails for a non AudioParam");
+  }
+
+  front.off("automation-event", counter);
+
+  is(count, 9,
+    "when calling `addAutomationEvent`, the WebAudioActor should still fire `automation-event`.");
+
+  yield removeTab(target.tab);
+});
new file mode 100644
--- /dev/null
+++ b/browser/devtools/webaudioeditor/test/browser_audionode-actor-get-automation-data-01.js
@@ -0,0 +1,53 @@
+/* Any copyright is dedicated to the Public Domain.
+   http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/**
+ * Test AudioNode#getAutomationData() after adding automation events, ending with
+ * a curve event so the last event's duration is taken into account.
+ */
+
+add_task(function*() {
+  let { target, front } = yield initBackend(SIMPLE_CONTEXT_URL);
+  let [_, [destNode, oscNode, gainNode]] = yield Promise.all([
+    front.setup({ reload: true }),
+    get3(front, "create-node")
+  ]);
+
+  let t0 = 0, t1 = 0.1, t2 = 0.2, t3 = 0.3, t4 = 0.4, t5 = 0.6, t6 = 0.7, t7 = 1;
+  let curve = [-1, 0, 1];
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.2, t0]);
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.3, t1]);
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.4, t2]);
+  yield oscNode.addAutomationEvent("frequency", "linearRampToValueAtTime", [1, t3]);
+  yield oscNode.addAutomationEvent("frequency", "linearRampToValueAtTime", [0.15, t4]);
+  yield oscNode.addAutomationEvent("frequency", "exponentialRampToValueAtTime", [0.75, t5]);
+  yield oscNode.addAutomationEvent("frequency", "exponentialRampToValueAtTime", [0.05, t6]);
+  // End with a curve event so the last event's duration is taken into account
+  // when computing the sampled time range
+  yield oscNode.addAutomationEvent("frequency", "setValueCurveAtTime", [curve, t6, t7 - t6]);
+
+  let { events, values } = yield oscNode.getAutomationData("frequency");
+
+  is(events.length, 8, "8 recorded events returned.");
+  is(values.length, 2000, "2000 value points returned.");
+
+  checkAutomationValue(values, 0.05, 0.2);
+  checkAutomationValue(values, 0.1, 0.3);
+  checkAutomationValue(values, 0.15, 0.3);
+  checkAutomationValue(values, 0.2, 0.4);
+  checkAutomationValue(values, 0.25, 0.7);
+  checkAutomationValue(values, 0.3, 1);
+  checkAutomationValue(values, 0.35, 0.575);
+  checkAutomationValue(values, 0.4, 0.15);
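+  // Samples on the exponential ramps follow v(t) = v0 * (v1 / v0) ^ ((t - t0) / (t1 - t0)),
+  // hence the Math.pow expressions below.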
+  checkAutomationValue(values, 0.45, 0.15 * Math.pow(0.75/0.15,0.05/0.2));
+  checkAutomationValue(values, 0.5, 0.15 * Math.pow(0.75/0.15,0.5));
+  checkAutomationValue(values, 0.55, 0.15 * Math.pow(0.75/0.15,0.15/0.2));
+  checkAutomationValue(values, 0.6, 0.75);
+  checkAutomationValue(values, 0.65, 0.75 * Math.pow(0.05/0.75, 0.5));
+  checkAutomationValue(values, 0.705, -1); // Sample slightly after 0.7 so the value isn't skewed by the tail of the previous exponential ramp
+  checkAutomationValue(values, 0.8, 0);
+  checkAutomationValue(values, 0.9, 1);
+  checkAutomationValue(values, 1, 1);
+
+  yield removeTab(target.tab);
+});
new file mode 100644
--- /dev/null
+++ b/browser/devtools/webaudioeditor/test/browser_audionode-actor-get-automation-data-02.js
@@ -0,0 +1,38 @@
+/* Any copyright is dedicated to the Public Domain.
+   http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/**
+ * Test AudioNode#getAutomationData() when the automation series ends with
+ * `setTargetAtTime`, which approaches its target asymptotically.
+ */
+
+add_task(function*() {
+  let { target, front } = yield initBackend(SIMPLE_CONTEXT_URL);
+  let [_, [destNode, oscNode, gainNode]] = yield Promise.all([
+    front.setup({ reload: true }),
+    get3(front, "create-node")
+  ]);
+
+  let t0 = 0, t1 = 0.1, t2 = 0.2, t3 = 0.3, t4 = 0.4, t5 = 0.6, t6 = 0.7, t7 = 1;
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.2, t0]);
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.3, t1]);
+  yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.4, t2]);
+  yield oscNode.addAutomationEvent("frequency", "linearRampToValueAtTime", [1, t3]);
+  yield oscNode.addAutomationEvent("frequency", "linearRampToValueAtTime", [0.15, t4]);
+  yield oscNode.addAutomationEvent("frequency", "exponentialRampToValueAtTime", [0.75, t5]);
+  yield oscNode.addAutomationEvent("frequency", "exponentialRampToValueAtTime", [0.05, t6]);
+  // End with a setTargetAtTime event; since it approaches its target asymptotically,
+  // this gives us more points to render than the default 2000
+  yield oscNode.addAutomationEvent("frequency", "setTargetAtTime", [1, t7, 0.5]);
+
+  let { events, values } = yield oscNode.getAutomationData("frequency");
+
+  is(events.length, 8, "8 recorded events returned.");
+  is(values.length, 4000, "4000 value points returned when ending with an exponentially approaching automation event.");
+
+  checkAutomationValue(values, 1, 0.05);
+  checkAutomationValue(values, 2, 0.87);
+  checkAutomationValue(values, 3, 0.98);
+
+  yield removeTab(target.tab);
+});
new file mode 100644
--- /dev/null
+++ b/browser/devtools/webaudioeditor/test/browser_webaudio-actor-automation-event.js
@@ -0,0 +1,51 @@
+/* Any copyright is dedicated to the Public Domain.
+   http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/**
+ * Test that the WebAudioActor receives automation calls from the content
+ * and emits `automation-event` with the correct arguments.
+ */
+
+add_task(function*() {
+  let { target, front } = yield initBackend(AUTOMATION_URL);
+  let events = [];
+
+  let expected = [
+    ["setValueAtTime", 0.2, 0],
+    ["linearRampToValueAtTime", 1, 0.3],
+    ["exponentialRampToValueAtTime", 0.75, 0.6],
+    ["setValueCurveAtTime", [-1, 0 ,1], 0.7, 0.3],
+  ];
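+  // These mirror, in order, the frequency automation calls made in doc_automation.html.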
+
+  front.on("automation-event", onAutomationEvent);
+
+  let [_, __, [destNode, oscNode, gainNode], [connect1, connect2]] = yield Promise.all([
+    front.setup({ reload: true }),
+    once(front, "start-context"),
+    get3(front, "create-node"),
+    get2(front, "connect-node")
+  ]);
+
+  is(events.length, 4, "correct number of events fired");
+
+  function onAutomationEvent (e) {
+    let { eventName, paramName, args } = e;
+    let exp = expected[events.length];
+
+    is(eventName, exp[0], "correct eventName in event");
+    is(paramName, "frequency", "correct paramName in event");
+    is(args.length, exp.length - 1, "correct length in args");
+    args.forEach((a, i) => {
+      // In the case of an array
+      if (typeof a === "object") {
+        a.forEach((f, j) => is(f, exp[i + 1][j], "correct argument in args"));
+      } else {
+        is(a, exp[i + 1], "correct argument in args");
+      }
+    });
+    events.push([eventName].concat(args));
+  }
+
+  front.off("automation-event", onAutomationEvent);
+  yield removeTab(target.tab);
+});
new file mode 100644
--- /dev/null
+++ b/browser/devtools/webaudioeditor/test/doc_automation.html
@@ -0,0 +1,30 @@
+<!-- Any copyright is dedicated to the Public Domain.
+     http://creativecommons.org/publicdomain/zero/1.0/ -->
+<!doctype html>
+
+<html>
+  <head>
+    <meta charset="utf-8"/>
+    <title>Web Audio Editor test page</title>
+  </head>
+
+  <body>
+
+    <script type="text/javascript;version=1.8">
+      "use strict";
+
+      let ctx = new AudioContext();
+      let osc = ctx.createOscillator();
+      let gain = ctx.createGain();
+      gain.gain.value = 0;
+      osc.frequency.setValueAtTime(0.2, 0);
+      osc.frequency.linearRampToValueAtTime(1, 0.3);
+      osc.frequency.exponentialRampToValueAtTime(0.75, 0.6);
+      osc.frequency.setValueCurveAtTime(new Float32Array([-1, 0, 1]), 0.7, 0.3);
+      osc.connect(gain);
+      gain.connect(ctx.destination);
+      osc.start(0);
+    </script>
+  </body>
+
+</html>
--- a/browser/devtools/webaudioeditor/test/head.js
+++ b/browser/devtools/webaudioeditor/test/head.js
@@ -4,17 +4,17 @@
 
 const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components;
 
 let { Services } = Cu.import("resource://gre/modules/Services.jsm", {});
 
 // Enable logging for all the tests. Both the debugger server and frontend will
 // be affected by this pref.
 let gEnableLogging = Services.prefs.getBoolPref("devtools.debugger.log");
-Services.prefs.setBoolPref("devtools.debugger.log", true);
+Services.prefs.setBoolPref("devtools.debugger.log", false);
 
 let { Task } = Cu.import("resource://gre/modules/Task.jsm", {});
 let { Promise } = Cu.import("resource://gre/modules/Promise.jsm", {});
 let { gDevTools } = Cu.import("resource:///modules/devtools/gDevTools.jsm", {});
 let { devtools } = Cu.import("resource://gre/modules/devtools/Loader.jsm", {});
 let { DebuggerServer } = Cu.import("resource://gre/modules/devtools/dbg-server.jsm", {});
 
 let { WebAudioFront } = devtools.require("devtools/server/actors/webaudio");
@@ -25,16 +25,17 @@ const SIMPLE_CONTEXT_URL = EXAMPLE_URL +
 const COMPLEX_CONTEXT_URL = EXAMPLE_URL + "doc_complex-context.html";
 const SIMPLE_NODES_URL = EXAMPLE_URL + "doc_simple-node-creation.html";
 const MEDIA_NODES_URL = EXAMPLE_URL + "doc_media-node-creation.html";
 const BUFFER_AND_ARRAY_URL = EXAMPLE_URL + "doc_buffer-and-array.html";
 const DESTROY_NODES_URL = EXAMPLE_URL + "doc_destroy-nodes.html";
 const CONNECT_PARAM_URL = EXAMPLE_URL + "doc_connect-param.html";
 const CONNECT_MULTI_PARAM_URL = EXAMPLE_URL + "doc_connect-multi-param.html";
 const IFRAME_CONTEXT_URL = EXAMPLE_URL + "doc_iframe-context.html";
+const AUTOMATION_URL = EXAMPLE_URL + "doc_automation.html";
 
 // All tests are asynchronous.
 waitForExplicitFinish();
 
 let gToolEnabled = Services.prefs.getBoolPref("devtools.webaudioeditor.enabled");
 
 gDevTools.testing = true;
 
@@ -389,16 +390,46 @@ function countGraphObjects (win) {
 */
 function forceCC () {
   SpecialPowers.DOMWindowUtils.cycleCollect();
   SpecialPowers.DOMWindowUtils.garbageCollect();
   SpecialPowers.DOMWindowUtils.garbageCollect();
 }
 
 /**
+ * Takes a `values` array of automation value entries,
+ * finds the value at `time` seconds, and checks
+ * that it is close to `expected`.
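+ *
+ * Example:
+ *   checkAutomationValue(values, 0.3, 1); // the sampled value at t=0.3s should be ~1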
+ */
+function checkAutomationValue (values, time, expected) {
+  // Remain flexible on values as we can approximate points
+  let EPSILON = 0.01;
+
+  let value = getValueAt(values, time);
+  ok(Math.abs(value - expected) < EPSILON, "Timeline value at " + time + " is " + value + ", which is close to the expected " + expected);
+
+  /**
+   * Entries in `values` are ordered by time, so if we can't find an exact match
+   * for the time of interest, return the average of the two entries surrounding it.
+   * This should give us a very close value.
+   */
+  function getValueAt (values, time) {
+    for (let i = 0; i < values.length; i++) {
+      if (values[i].t === time) {
+        return values[i].value;
+      }
+      if (values[i].t > time) {
+        return (values[i - 1].value + values[i].value) / 2;
+      }
+    }
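+    // `time` is past the last recorded point; fall back to the final value.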
+    return values[values.length - 1].value;
+  }
+}
+
+/**
  * List of audio node properties to test against expectations of the AudioNode actor
  */
 
 const NODE_DEFAULT_VALUES = {
   "AudioDestinationNode": {},
   "MediaElementAudioSourceNode": {},
   "MediaStreamAudioSourceNode": {},
   "MediaStreamAudioDestinationNode": {
--- a/toolkit/devtools/server/actors/webaudio.js
+++ b/toolkit/devtools/server/actors/webaudio.js
@@ -8,32 +8,41 @@ const {Cc, Ci, Cu, Cr} = require("chrome
 const Services = require("Services");
 
 const { Promise: promise } = Cu.import("resource://gre/modules/Promise.jsm", {});
 const events = require("sdk/event/core");
 const { on: systemOn, off: systemOff } = require("sdk/system/events");
 const protocol = require("devtools/server/protocol");
 const { CallWatcherActor, CallWatcherFront } = require("devtools/server/actors/call-watcher");
 const { ThreadActor } = require("devtools/server/actors/script");
+const AutomationTimeline = require("./utils/automation-timeline");
 
 const { on, once, off, emit } = events;
 const { types, method, Arg, Option, RetVal } = protocol;
 
+const AUTOMATION_GRANULARITY = 2000;
+const AUTOMATION_GRANULARITY_MAX = 6000;
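+// getAutomationData samples AUTOMATION_GRANULARITY points per AudioParam timeline,
+// extending up to AUTOMATION_GRANULARITY_MAX when the series ends with setTargetAtTime.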
+
 const AUDIO_GLOBALS = [
-  "AudioContext", "AudioNode"
+  "AudioContext", "AudioNode", "AudioParam"
 ];
 
 const NODE_CREATION_METHODS = [
   "createBufferSource", "createMediaElementSource", "createMediaStreamSource",
   "createMediaStreamDestination", "createScriptProcessor", "createAnalyser",
   "createGain", "createDelay", "createBiquadFilter", "createWaveShaper",
   "createPanner", "createConvolver", "createChannelSplitter", "createChannelMerger",
   "createDynamicsCompressor", "createOscillator"
 ];
 
+const AUTOMATION_METHODS = [
+  "setValueAtTime", "linearRampToValueAtTime", "exponentialRampToValueAtTime",
+  "setTargetAtTime", "setValueCurveAtTime"
+];
+
 const NODE_ROUTING_METHODS = [
   "connect", "disconnect"
 ];
 
 const NODE_PROPERTIES = {
   "OscillatorNode": {
     "type": {},
     "frequency": {},
@@ -125,21 +134,31 @@ let AudioNodeActor = exports.AudioNodeAc
     protocol.Actor.prototype.initialize.call(this, conn);
 
     // Store ChromeOnly property `id` to identify AudioNode,
     // rather than storing a strong reference, and store a weak
     // ref to underlying node for controlling.
     this.nativeID = node.id;
     this.node = Cu.getWeakReference(node);
 
+    // Stores the AutomationTimelines for this node's AudioParams.
+    this.automation = {};
+
     try {
       this.type = getConstructorName(node);
     } catch (e) {
       this.type = "";
     }
+
+    // Create automation timelines for all AudioParams
+    Object.keys(NODE_PROPERTIES[this.type] || {})
+      .filter(isAudioParam.bind(null, node))
+      .forEach(paramName => {
+        this.automation[paramName] = new AutomationTimeline(node[paramName].defaultValue);
+      });
   },
 
   /**
    * Returns the name of the audio type.
    * Examples: "OscillatorNode", "MediaElementAudioSourceNode"
    */
   getType: method(function () {
     return this.type;
@@ -207,20 +226,23 @@ let AudioNodeActor = exports.AudioNodeAc
   setParam: method(function (param, value) {
     let node = this.node.get();
 
     if (node === null) {
       return CollectedAudioNodeError();
     }
 
     try {
-      if (isAudioParam(node, param))
+      if (isAudioParam(node, param)) {
         node[param].value = value;
-      else
+        this.automation[param].setValue(value);
+      }
+      else {
         node[param] = value;
+      }
       return undefined;
     } catch (e) {
       return constructError(e);
     }
   }, {
     request: {
       param: Arg(0, "string"),
       value: Arg(1, "nullable:primitive")
@@ -279,17 +301,17 @@ let AudioNodeActor = exports.AudioNodeAc
     request: { param: Arg(0, "string") },
     response: { flags: RetVal("nullable:primitive") }
   }),
 
   /**
    * Get an array of objects each containing a `param` and `value` property,
    * corresponding to a property name and current value of the audio node.
    */
-  getParams: method(function (param) {
+  getParams: method(function () {
     let props = Object.keys(NODE_PROPERTIES[this.type]);
     return props.map(prop =>
       ({ param: prop, value: this.getParam(prop), flags: this.getParamFlags(prop) }));
   }, {
     response: { params: RetVal("json") }
   }),
 
   /**
@@ -367,18 +389,143 @@ let AudioNodeActor = exports.AudioNodeAc
       // patched method that fires the webaudio actor's `disconnect` event.
       XPCNativeWrapper.unwrap(node).disconnect(output);
     } catch (e) {
       return constructError(e);
     }
   }, {
     request: { output: Arg(0, "nullable:number") },
     response: { error: RetVal("nullable:json") }
-  })
+  }),
+
+  getAutomationData: method(function (paramName) {
+    let timeline = this.automation[paramName];
+    if (!timeline) {
+      return null;
+    }
+
+    let events = timeline.events;
+    let values = [];
+    let i = 0;
+
+    if (!events.length) {
+      return { events, values };
+    }
+
+    let firstEvent = events[0];
+    let lastEvent = events[events.length - 1];
+    // `setValueCurveAtTime` will have a duration value -- other
+    // events will have duration of `0`.
+    let timeDelta = (lastEvent.time + lastEvent.duration) - firstEvent.time;
+    let scale = timeDelta / AUTOMATION_GRANULARITY;
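+    // `scale` is the time step between consecutive sampled points.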
+
+    for (; i < AUTOMATION_GRANULARITY; i++) {
+      let t = firstEvent.time + (i * scale);
+      let value = timeline.getValueAtTime(t);
+      values.push({ t, value });
+    }
+
+    // If the last event is setTargetAtTime, the automation
+    // doesn't actually begin until the event's time, and exponentially
+    // approaches the target value. In this case, keep sampling (at twice
+    // the step) until the counter reaches AUTOMATION_GRANULARITY_MAX.
+    if (lastEvent.type === "setTargetAtTime") {
+      for (; i < AUTOMATION_GRANULARITY_MAX; i++) {
+        let t = firstEvent.time + (++i * scale);
+        let value = timeline.getValueAtTime(t);
+        values.push({ t, value });
+      }
+    }
+
+    return { events, values };
+  }, {
+    request: { paramName: Arg(0, "string") },
+    response: { values: RetVal("nullable:json") }
+  }),
+
+  /**
+   * Called via WebAudioActor, registers an automation event
+   * for the AudioParam called.
+   *
+   * @param String paramName
+   *        Name of the AudioParam.
+   * @param String eventName
+   *        Name of the automation event called.
+   * @param Array args
+   *        Arguments passed into the automation call.
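+   *
+   * Example (mirroring the tests, called on an AudioNodeFront):
+   *   yield oscNode.addAutomationEvent("frequency", "setValueAtTime", [0.2, 0]);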
+   */
+  addAutomationEvent: method(function (paramName, eventName, args=[]) {
+    let node = this.node.get();
+    let timeline = this.automation[paramName];
 
+    if (node === null) {
+      return CollectedAudioNodeError();
+    }
+
+    if (!timeline || !node[paramName][eventName]) {
+      return InvalidCommandError();
+    }
+
+    try {
+      // Using the unwrapped node and parameter, the corresponding
+      // WebAudioActor event will be fired, subsequently calling
+      // `_recordAutomationEvent`. Some finesse is required to handle
+      // the cast of TypedArray arguments over the protocol, which is
+      // taken care of below. The event will cast the argument back
+      // into an array to be broadcast from WebAudioActor, but the
+      // double-casting will only occur when starting from `addAutomationEvent`,
+      // which is only used in tests.
+      let param = XPCNativeWrapper.unwrap(node[paramName]);
+
+      // If calling `setValueCurveAtTime`, the first argument
+      // is a Float32Array, which won't be able to be serialized
+      // over the protocol. Cast a normal array to a Float32Array here.
+      if (eventName === "setValueCurveAtTime") {
+        let contentGlobal = Cu.getGlobalForObject(param);
+        // Since we cannot iterate over and modify the actual Float32Array
+        // in the content, we'll have to pass in an array to the constructor
+        // from the same context, since we can iterate over non-TypedArrays.
+        let contentArray = copyInto(new contentGlobal.Array(), args[0]);
+
+        // Create a Float32Array from the content, seeding with an array
+        // from the same scope.
+        let curve = new contentGlobal.Float32Array(contentArray);
+        args[0] = curve;
+      }
+
+      param[eventName].apply(param, args);
+    } catch (e) {
+      return constructError(e);
+    }
+  }, {
+    request: {
+      paramName: Arg(0, "string"),
+      eventName: Arg(1, "string"),
+      args: Arg(2, "nullable:json")
+    },
+    response: { error: RetVal("nullable:json") }
+  }),
+
+  /**
+   * Registers the automation event in the AudioNodeActor's
+   * internal timeline. Called when setting automation via
+   * `addAutomationEvent`, or from the WebAudioActor's listening
+   * to the event firing via content.
+   *
+   * @param String paramName
+   *        Name of the AudioParam.
+   * @param String eventName
+   *        Name of the automation event called.
+   * @param Array args
+   *        Arguments passed into the automation call.
+   */
+  _recordAutomationEvent: function (paramName, eventName, args) {
+    let timeline = this.automation[paramName];
+    timeline[eventName].apply(timeline, args);
+  }
 });
 
 /**
  * The corresponding Front object for the AudioNodeActor.
  */
 let AudioNodeFront = protocol.FrontClass(AudioNodeActor, {
   initialize: function (client, form) {
     protocol.Front.prototype.initialize.call(this, client, form);
@@ -470,20 +617,23 @@ let WebAudioActor = exports.WebAudioActo
     // All Web Audio nodes inherit from AudioNode's prototype, so
     // hook into the `connect` and `disconnect` methods
     if (WebAudioFront.NODE_ROUTING_METHODS.has(name)) {
       this._handleRoutingCall(functionCall);
     }
     else if (WebAudioFront.NODE_CREATION_METHODS.has(name)) {
       this._handleCreationCall(functionCall);
     }
+    else if (WebAudioFront.AUTOMATION_METHODS.has(name)) {
+      this._handleAutomationCall(functionCall);
+    }
   },
 
   _handleRoutingCall: function(functionCall) {
-    let { caller, args, window, name } = functionCall.details;
+    let { caller, args, name } = functionCall.details;
     let source = caller;
     let dest = args[0];
     let isAudioParam = dest ? getConstructorName(dest) === "AudioParam" : false;
 
     // audionode.connect(param)
     if (name === "connect" && isAudioParam) {
       this._onConnectParam(source, dest);
     }
@@ -508,16 +658,36 @@ let WebAudioActor = exports.WebAudioActo
       // and trigger a `create-node` event for the context destination
       this._onStartContext();
       this._onCreateNode(caller.destination);
       this._firstNodeCreated = true;
     }
     this._onCreateNode(result);
   },
 
+  _handleAutomationCall: function (functionCall) {
+    let { caller, name, args } = functionCall.details;
+    let wrappedParam = new XPCNativeWrapper(caller);
+
+    // Sanitize arguments, as these should all be numbers,
+    // with the exception of a TypedArray, which needs
+    // to be cast to an Array
+    args = sanitizeAutomationArgs(args);
+
+    let nodeActor = this._getActorByNativeID(wrappedParam._parentID);
+    nodeActor._recordAutomationEvent(wrappedParam._paramName, name, args);
+
+    this._onAutomationEvent({
+      node: nodeActor,
+      paramName: wrappedParam._paramName,
+      eventName: name,
+      args: args
+    });
+  },
+
   /**
    * Stops listening for document global changes and puts this actor
    * to hibernation. This method is called automatically just before the
    * actor is destroyed.
    */
   finalize: method(function() {
     if (!this._initialized) {
       return;
@@ -566,16 +736,23 @@ let WebAudioActor = exports.WebAudioActo
     },
     "create-node": {
       type: "createNode",
       source: Arg(0, "audionode")
     },
     "destroy-node": {
       type: "destroyNode",
       source: Arg(0, "audionode")
+    },
+    "automation-event": {
+      type: "automationEvent",
+      node: Option(0, "audionode"),
+      paramName: Option(0, "string"),
+      eventName: Option(0, "string"),
+      args: Option(0, "json")
     }
   },
 
   /**
    * Helper for constructing an AudioNodeActor, assigning to
    * internal weak map, and tracking via `manage` so it is assigned
    * an `actorID`.
    */
@@ -707,16 +884,28 @@ let WebAudioActor = exports.WebAudioActo
    * Ensures that the new global has recording on
    * so we can proxy the function calls.
    */
   _onGlobalCreated: function () {
     this._callWatcher.resumeRecording();
   },
 
   /**
+   * Fired when an automation event is added to an AudioNode.
+   */
+  _onAutomationEvent: function ({node, paramName, eventName, args}) {
+    emit(this, "automation-event",  {
+      node: node,
+      paramName: paramName,
+      eventName: eventName,
+      args: args
+    });
+  },
+
+  /**
    * Called when the underlying ContentObserver fires `global-destroyed`
    * so we can cleanup some things between the global being destroyed and
    * when the actor's `finalize` method gets called.
    */
   _onGlobalDestroyed: function ({id}) {
     if (this._callWatcher._tracedWindowId !== id) {
       return;
     }
@@ -733,16 +922,17 @@ let WebAudioActor = exports.WebAudioActo
  */
 let WebAudioFront = exports.WebAudioFront = protocol.FrontClass(WebAudioActor, {
   initialize: function(client, { webaudioActor }) {
     protocol.Front.prototype.initialize.call(this, client, { actor: webaudioActor });
     this.manage(this);
   }
 });
 
+WebAudioFront.AUTOMATION_METHODS = new Set(AUTOMATION_METHODS);
 WebAudioFront.NODE_CREATION_METHODS = new Set(NODE_CREATION_METHODS);
 WebAudioFront.NODE_ROUTING_METHODS = new Set(NODE_ROUTING_METHODS);
 
 /**
  * Determines whether or not property is an AudioParam.
  *
  * @param AudioNode node
  *        An AudioNode.
@@ -776,16 +966,23 @@ function constructError (err) {
  */
 function CollectedAudioNodeError () {
   return {
     message: "AudioNode has been garbage collected and can no longer be reached.",
     type: "UnreachableAudioNode"
   };
 }
 
+function InvalidCommandError () {
+  return {
+    message: "The command on AudioNode is invalid.",
+    type: "InvalidCommand"
+  };
+}
+
 /**
  * Takes an object and converts it's `toString()` form, like
  * "[object OscillatorNode]" or "[object Float32Array]",
  * or XrayWrapper objects like "[object XrayWrapper [object Array]]"
  * to a string of just the constructor name, like "OscillatorNode",
  * or "Float32Array".
  */
 function getConstructorName (obj) {
@@ -802,8 +999,42 @@ function createObjectGrip (value) {
     type: "object",
     preview: {
       kind: "ObjectWithText",
       text: ""
     },
     class: getConstructorName(value)
   };
 }
+
+/**
+ * Converts any TypedArrays in the given array, which cannot
+ * be passed over the wire, into normal Array equivalents.
+ */
+function sanitizeAutomationArgs (args) {
+  return args.reduce((newArgs, el) => {
+    newArgs.push(typeof el === "object" && getConstructorName(el) === "Float32Array" ? castToArray(el) : el);
+    return newArgs;
+  }, []);
+}
+
+/**
+ * Casts TypedArray to a normal array via a
+ * new scope.
+ */
+function castToArray (typedArray) {
+  // The Xray machinery for TypedArrays denies indexed access on the grounds
+  // that it's slow, and advises callers to do a structured clone instead.
+  let global = Cu.getGlobalForObject(this);
+  let safeView = Cu.cloneInto(typedArray.subarray(), global);
+  return copyInto([], safeView);
+}
+
+/**
+ * Copies values of an array-like `source` into
+ * a similarly array-like `dest`.
+ */
+function copyInto (dest, source) {
+  for (let i = 0; i < source.length; i++) {
+    dest[i] = source[i];
+  }
+  return dest;
+}