Bug 1265732 - Decouple the AudioNodeFront and WebAudioFront from the AudioNodeActor and WebAudioActor respectively; r=ejpbruel
author: Nick Fitzgerald <fitzgen@gmail.com>
date: Fri, 03 Jun 2016 10:45:10 -0700
changeset 339359 03b6ea8fded30783e73cc947b4b111e7746c12c3
parent 339358 c28f29acfc2a43c8cdcfbdae3c7da64166d030d9
child 339360 4bacacb4d1cf8ea75b75a3bd9933fce249fc4d74
push id: 6249
push user: jlund@mozilla.com
push date: Mon, 01 Aug 2016 13:59:36 +0000
treeherder: mozilla-beta@bad9d4f5bf7e
reviewers: ejpbruel
bugs: 1265732
milestone: 49.0a1
devtools/client/framework/test/browser_target_support.js
devtools/client/webaudioeditor/panel.js
devtools/client/webaudioeditor/test/head.js
devtools/server/actors/webaudio.js
devtools/shared/fronts/moz.build
devtools/shared/fronts/webaudio.js
devtools/shared/specs/moz.build
devtools/shared/specs/webaudio.js
devtools/shared/webaudio.js
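
The change splits each protocol definition into three layers: a spec shared by client and server (devtools/shared/specs), the actor that implements it on the server (devtools/server/actors), and a front generated from the same spec on the client (devtools/shared/fronts). A minimal sketch of that layering, using hypothetical `demo` names rather than the real webaudio modules changed below; it only runs inside the DevTools loader, since it requires devtools/shared/protocol:

const protocol = require("devtools/shared/protocol");
const { Arg, RetVal, generateActorSpec } = protocol;

// 1. The spec describes the wire protocol (request/response packets, events)
//    and is shared by both client and server.
const demoSpec = generateActorSpec({
  typeName: "demo",
  methods: {
    echo: {
      request: { text: Arg(0, "string") },
      response: { text: RetVal("string") }
    }
  }
});

// 2. The actor implements the spec on the server; method bodies are plain
//    functions with no inline type annotations.
const DemoActor = protocol.ActorClassWithSpec(demoSpec, {
  echo: function (text) {
    return text;
  }
});

// 3. The front mirrors the actor on the client and is generated from the same
//    spec, so it no longer needs to require the actor module at all.
const DemoFront = protocol.FrontClassWithSpec(demoSpec, {
  initialize: function (client, form) {
    protocol.Front.prototype.initialize.call(this, client, form);
    this.manage(this);
  }
});
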
--- a/devtools/client/framework/test/browser_target_support.js
+++ b/devtools/client/framework/test/browser_target_support.js
@@ -2,17 +2,17 @@
 /* vim: set ft=javascript ts=2 et sw=2 tw=80: */
 /* Any copyright is dedicated to the Public Domain.
  * http://creativecommons.org/publicdomain/zero/1.0/ */
 
 // Test support methods on Target, such as `hasActor`, `getActorDescription`,
 // `actorHasMethod` and `getTrait`.
 
 var { WebAudioFront } =
-  require("devtools/server/actors/webaudio");
+  require("devtools/shared/fronts/webaudio");
 
 function* testTarget(client, target) {
   yield target.makeRemote();
 
   is(target.hasActor("timeline"), true, "target.hasActor() true when actor exists.");
   is(target.hasActor("webaudio"), true, "target.hasActor() true when actor exists.");
   is(target.hasActor("notreal"), false, "target.hasActor() false when actor does not exist.");
   // Create a front to ensure the actor is loaded
--- a/devtools/client/webaudioeditor/panel.js
+++ b/devtools/client/webaudioeditor/panel.js
@@ -2,17 +2,17 @@
 /* vim: set ft=javascript ts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 "use strict";
 
 const { Cc, Ci, Cu, Cr } = require("chrome");
 const EventEmitter = require("devtools/shared/event-emitter");
-const { WebAudioFront } = require("devtools/server/actors/webaudio");
+const { WebAudioFront } = require("devtools/shared/fronts/webaudio");
 var Promise = require("promise");
 
 function WebAudioEditorPanel(iframeWindow, toolbox) {
   this.panelWin = iframeWindow;
   this._toolbox = toolbox;
   this._destroyer = null;
 
   EventEmitter.decorate(this);
--- a/devtools/client/webaudioeditor/test/head.js
+++ b/devtools/client/webaudioeditor/test/head.js
@@ -9,17 +9,17 @@ var { Task } = require("devtools/shared/
 var Services = require("Services");
 var { gDevTools } = require("devtools/client/framework/devtools");
 var { TargetFactory } = require("devtools/client/framework/target");
 var { DebuggerServer } = require("devtools/server/main");
 var { generateUUID } = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
 
 var Promise = require("promise");
 var Services = require("Services");
-var { WebAudioFront } = require("devtools/server/actors/webaudio");
+var { WebAudioFront } = require("devtools/shared/fronts/webaudio");
 var DevToolsUtils = require("devtools/shared/DevToolsUtils");
 var audioNodes = require("devtools/server/actors/utils/audionodes.json");
 var mm = null;
 
 const FRAME_SCRIPT_UTILS_URL = "chrome://devtools/content/shared/frame-script-utils.js";
 const EXAMPLE_URL = "http://example.com/browser/devtools/client/webaudioeditor/test/";
 const SIMPLE_CONTEXT_URL = EXAMPLE_URL + "doc_simple-context.html";
 const COMPLEX_CONTEXT_URL = EXAMPLE_URL + "doc_complex-context.html";
--- a/devtools/server/actors/webaudio.js
+++ b/devtools/server/actors/webaudio.js
@@ -12,50 +12,38 @@ const promise = require("promise");
 const { on: systemOn, off: systemOff } = require("sdk/system/events");
 const protocol = require("devtools/shared/protocol");
 const { CallWatcherActor } = require("devtools/server/actors/call-watcher");
 const { CallWatcherFront } = require("devtools/shared/fronts/call-watcher");
 const { createValueGrip } = require("devtools/server/actors/object");
 const AutomationTimeline = require("./utils/automation-timeline");
 const { on, once, off, emit } = events;
 const { types, method, Arg, Option, RetVal, preEvent } = protocol;
+const {
+  audionodeSpec,
+  webAudioSpec,
+  AUTOMATION_METHODS,
+  NODE_CREATION_METHODS,
+  NODE_ROUTING_METHODS,
+} = require("devtools/shared/specs/webaudio");
+const { WebAudioFront } = require("devtools/shared/fronts/webaudio");
 const AUDIO_NODE_DEFINITION = require("devtools/server/actors/utils/audionodes.json");
 const ENABLE_AUTOMATION = false;
 const AUTOMATION_GRANULARITY = 2000;
 const AUTOMATION_GRANULARITY_MAX = 6000;
 
 const AUDIO_GLOBALS = [
   "AudioContext", "AudioNode", "AudioParam"
 ];
 
-const NODE_CREATION_METHODS = [
-  "createBufferSource", "createMediaElementSource", "createMediaStreamSource",
-  "createMediaStreamDestination", "createScriptProcessor", "createAnalyser",
-  "createGain", "createDelay", "createBiquadFilter", "createWaveShaper",
-  "createPanner", "createConvolver", "createChannelSplitter", "createChannelMerger",
-  "createDynamicsCompressor", "createOscillator", "createStereoPanner"
-];
-
-const AUTOMATION_METHODS = [
-  "setValueAtTime", "linearRampToValueAtTime", "exponentialRampToValueAtTime",
-  "setTargetAtTime", "setValueCurveAtTime", "cancelScheduledValues"
-];
-
-const NODE_ROUTING_METHODS = [
-  "connect", "disconnect"
-];
-
 /**
  * An Audio Node actor allowing communication to a specific audio node in the
  * Audio Context graph.
  */
-types.addActorType("audionode");
-var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
-  typeName: "audionode",
-
+var AudioNodeActor = exports.AudioNodeActor = protocol.ActorClassWithSpec(audionodeSpec, {
   form: function (detail) {
     if (detail === "actorid") {
       return this.actorID;
     }
 
     return {
       actor: this.actorID, // actorID is set when this is added to a pool
       type: this.type,
@@ -101,76 +89,71 @@ var AudioNodeActor = exports.AudioNodeAc
       });
   },
 
   /**
    * Returns the string name of the audio type.
    *
    * DEPRECATED: Use `audionode.type` instead, left here for legacy reasons.
    */
-  getType: method(function () {
+  getType: function () {
     return this.type;
-  }, { response: { type: RetVal("string") }}),
+  },
 
   /**
    * Returns a boolean indicating if the AudioNode has been "bypassed",
    * via `AudioNodeActor#bypass` method.
    *
    * @return Boolean
    */
-  isBypassed: method(function () {
+  isBypassed: function () {
     let node = this.node.get();
     if (node === null) {
       return false;
     }
 
     // Cast to boolean incase `passThrough` is undefined,
     // like for AudioDestinationNode
     return !!node.passThrough;
-  }, {
-    response: { bypassed: RetVal("boolean") }
-  }),
+  },
 
   /**
    * Takes a boolean, either enabling or disabling the "passThrough" option
    * on an AudioNode. If a node is bypassed, an effects processing node (like gain, biquad),
    * will allow the audio stream to pass through the node, unaffected. Returns
    * the bypass state of the node.
    *
    * @param Boolean enable
    *        Whether the bypass value should be set on or off.
    * @return Boolean
    */
-  bypass: method(function (enable) {
+  bypass: function (enable) {
     let node = this.node.get();
 
     if (node === null) {
       return;
     }
 
     if (this.bypassable) {
       node.passThrough = enable;
     }
 
     return this.isBypassed();
-  }, {
-    request: { enable: Arg(0, "boolean") },
-    response: { bypassed: RetVal("boolean") }
-  }),
+  },
 
   /**
    * Changes a param on the audio node. Responds with either `undefined`
    * on success, or a description of the error upon param set failure.
    *
    * @param String param
    *        Name of the AudioParam to change.
    * @param String value
    *        Value to change AudioParam to.
    */
-  setParam: method(function (param, value) {
+  setParam: function (param, value) {
     let node = this.node.get();
 
     if (node === null) {
       return CollectedAudioNodeError();
     }
 
     try {
       if (isAudioParam(node, param)) {
@@ -179,31 +162,25 @@ var AudioNodeActor = exports.AudioNodeAc
       }
       else {
         node[param] = value;
       }
       return undefined;
     } catch (e) {
       return constructError(e);
     }
-  }, {
-    request: {
-      param: Arg(0, "string"),
-      value: Arg(1, "nullable:primitive")
-    },
-    response: { error: RetVal("nullable:json") }
-  }),
+  },
 
   /**
    * Gets a param on the audio node.
    *
    * @param String param
    *        Name of the AudioParam to fetch.
    */
-  getParam: method(function (param) {
+  getParam: function (param) {
     let node = this.node.get();
 
     if (node === null) {
       return CollectedAudioNodeError();
     }
 
     // Check to see if it's an AudioParam -- if so,
     // return the `value` property of the parameter.
@@ -212,54 +189,44 @@ var AudioNodeActor = exports.AudioNodeAc
     // Return the grip form of the value; at this time,
     // there shouldn't be any non-primitives at the moment, other than
     // AudioBuffer or Float32Array references and the like,
     // so this just formats the value to be displayed in the VariablesView,
     // without using real grips and managing via actor pools.
     let grip = createValueGrip(value, null, createObjectGrip);
 
     return grip;
-  }, {
-    request: {
-      param: Arg(0, "string")
-    },
-    response: { text: RetVal("nullable:primitive") }
-  }),
+  },
 
   /**
    * Get an object containing key-value pairs of additional attributes
    * to be consumed by a front end, like if a property should be read only,
    * or is a special type (Float32Array, Buffer, etc.)
    *
    * @param String param
    *        Name of the AudioParam whose flags are desired.
    */
-  getParamFlags: method(function (param) {
+  getParamFlags: function (param) {
     return ((AUDIO_NODE_DEFINITION[this.type] || {}).properties || {})[param];
-  }, {
-    request: { param: Arg(0, "string") },
-    response: { flags: RetVal("nullable:primitive") }
-  }),
+  },
 
   /**
    * Get an array of objects each containing a `param` and `value` property,
    * corresponding to a property name and current value of the audio node.
    */
-  getParams: method(function (param) {
+  getParams: function (param) {
     let props = Object.keys(AUDIO_NODE_DEFINITION[this.type].properties || {});
     return props.map(prop =>
       ({ param: prop, value: this.getParam(prop), flags: this.getParamFlags(prop) }));
-  }, {
-    response: { params: RetVal("json") }
-  }),
+  },
 
   /**
    * Connects this audionode to an AudioParam via `node.connect(param)`.
    */
-  connectParam: method(function (destActor, paramName, output) {
+  connectParam: function (destActor, paramName, output) {
     let srcNode = this.node.get();
     let destNode = destActor.node.get();
 
     if (srcNode === null || destNode === null) {
       return CollectedAudioNodeError();
     }
 
     try {
@@ -267,29 +234,22 @@ var AudioNodeActor = exports.AudioNodeAc
       // patched method that fires the webaudio actor's `connect-param` event.
       // Connect directly to the wrapped `destNode`, otherwise
       // the patched method thinks this is a new node and won't be
       // able to find it in `_nativeToActorID`.
       XPCNativeWrapper.unwrap(srcNode).connect(destNode[paramName], output);
     } catch (e) {
       return constructError(e);
     }
-  }, {
-    request: {
-      destActor: Arg(0, "audionode"),
-      paramName: Arg(1, "string"),
-      output: Arg(2, "nullable:number")
-    },
-    response: { error: RetVal("nullable:json") }
-  }),
+  },
 
   /**
    * Connects this audionode to another via `node.connect(dest)`.
    */
-  connectNode: method(function (destActor, output, input) {
+  connectNode: function (destActor, output, input) {
     let srcNode = this.node.get();
     let destNode = destActor.node.get();
 
     if (srcNode === null || destNode === null) {
       return CollectedAudioNodeError();
     }
 
     try {
@@ -297,48 +257,38 @@ var AudioNodeActor = exports.AudioNodeAc
       // patched method that fires the webaudio actor's `connect-node` event.
       // Connect directly to the wrapped `destNode`, otherwise
       // the patched method thinks this is a new node and won't be
       // able to find it in `_nativeToActorID`.
       XPCNativeWrapper.unwrap(srcNode).connect(destNode, output, input);
     } catch (e) {
       return constructError(e);
     }
-  }, {
-    request: {
-      destActor: Arg(0, "audionode"),
-      output: Arg(1, "nullable:number"),
-      input: Arg(2, "nullable:number")
-    },
-    response: { error: RetVal("nullable:json") }
-  }),
+  },
 
   /**
    * Disconnects this audionode from all connections via `node.disconnect()`.
    */
-  disconnect: method(function (destActor, output) {
+  disconnect: function (destActor, output) {
     let node = this.node.get();
 
     if (node === null) {
       return CollectedAudioNodeError();
     }
 
     try {
       // Disconnect via the unwrapped node, so we can call the
       // patched method that fires the webaudio actor's `disconnect` event.
       XPCNativeWrapper.unwrap(node).disconnect(output);
     } catch (e) {
       return constructError(e);
     }
-  }, {
-    request: { output: Arg(0, "nullable:number") },
-    response: { error: RetVal("nullable:json") }
-  }),
+  },
 
-  getAutomationData: method(function (paramName) {
+  getAutomationData: function (paramName) {
     let timeline = this.automation[paramName];
     if (!timeline) {
       return null;
     }
 
     let events = timeline.events;
     let values = [];
     let i = 0;
@@ -368,33 +318,30 @@ var AudioNodeActor = exports.AudioNodeAc
       for (; i < AUTOMATION_GRANULARITY_MAX; i++) {
         let delta = firstEvent.time + (++i * scale);
         let value = timeline.getValueAtTime(delta);
         values.push({ delta, value });
       }
     }
 
     return { events, values };
-  }, {
-    request: { paramName: Arg(0, "string") },
-    response: { values: RetVal("nullable:json") }
-  }),
+  },
 
   /**
    * Called via WebAudioActor, registers an automation event
    * for the AudioParam called.
    *
    * @param String paramName
    *        Name of the AudioParam.
    * @param String eventName
    *        Name of the automation event called.
    * @param Array args
    *        Arguments passed into the automation call.
    */
-  addAutomationEvent: method(function (paramName, eventName, args = []) {
+  addAutomationEvent: function (paramName, eventName, args = []) {
     let node = this.node.get();
     let timeline = this.automation[paramName];
 
     if (node === null) {
       return CollectedAudioNodeError();
     }
 
     if (!timeline || !node[paramName][eventName]) {
@@ -426,24 +373,17 @@ var AudioNodeActor = exports.AudioNodeAc
 
       // Apply the args back from the content scope, which is necessary
       // due to the method wrapping changing in bug 1130901 to be exported
       // directly to the content scope.
       param[eventName].apply(param, contentArgs);
     } catch (e) {
       return constructError(e);
     }
-  }, {
-    request: {
-      paramName: Arg(0, "string"),
-      eventName: Arg(1, "string"),
-      args: Arg(2, "nullable:json")
-    },
-    response: { error: RetVal("nullable:json") }
-  }),
+  },
 
   /**
    * Registers the automation event in the AudioNodeActor's
    * internal timeline. Called when setting automation via
    * `addAutomationEvent`, or from the WebAudioActor's listening
    * to the event firing via content.
    *
    * @param String paramName
@@ -455,57 +395,21 @@ var AudioNodeActor = exports.AudioNodeAc
    */
   _recordAutomationEvent: function (paramName, eventName, args) {
     let timeline = this.automation[paramName];
     timeline[eventName].apply(timeline, args);
   }
 });
 
 /**
- * The corresponding Front object for the AudioNodeActor.
- *
- * @attribute {String} type
- *            The type of audio node, like "OscillatorNode", "MediaElementAudioSourceNode"
- * @attribute {Boolean} source
- *            Boolean indicating if the node is a source node, like BufferSourceNode,
- *            MediaElementAudioSourceNode, OscillatorNode, etc.
- * @attribute {Boolean} bypassable
- *            Boolean indicating if the audio node is bypassable (splitter,
- *            merger and destination nodes, for example, are not)
- */
-var AudioNodeFront = protocol.FrontClass(AudioNodeActor, {
-  form: function (form, detail) {
-    if (detail === "actorid") {
-      this.actorID = form;
-      return;
-    }
-
-    this.actorID = form.actor;
-    this.type = form.type;
-    this.source = form.source;
-    this.bypassable = form.bypassable;
-  },
-
-  initialize: function (client, form) {
-    protocol.Front.prototype.initialize.call(this, client, form);
-    // if we were manually passed a form, this was created manually and
-    // needs to own itself for now.
-    if (form) {
-      this.manage(this);
-    }
-  }
-});
-
-/**
  * The Web Audio Actor handles simple interaction with an AudioContext
  * high-level methods. After instantiating this actor, you'll need to set it
  * up by calling setup().
  */
-var WebAudioActor = exports.WebAudioActor = protocol.ActorClass({
-  typeName: "webaudio",
+var WebAudioActor = exports.WebAudioActor = protocol.ActorClassWithSpec(webAudioSpec, {
   initialize: function (conn, tabActor) {
     protocol.Actor.prototype.initialize.call(this, conn);
     this.tabActor = tabActor;
 
     this._onContentFunctionCall = this._onContentFunctionCall.bind(this);
 
     // Store ChromeOnly ID (`nativeID` property on AudioNodeActor) mapped
     // to the associated actorID, so we don't have to expose `nativeID`
@@ -521,30 +425,28 @@ var WebAudioActor = exports.WebAudioActo
     protocol.Actor.prototype.destroy.call(this, conn);
     this.finalize();
   },
 
   /**
    * Returns definition of all AudioNodes, such as AudioParams, and
    * flags.
    */
-  getDefinition: method(function () {
+  getDefinition: function () {
     return AUDIO_NODE_DEFINITION;
-  }, {
-    response: { definition: RetVal("json") }
-  }),
+  },
 
   /**
    * Starts waiting for the current tab actor's document global to be
    * created, in order to instrument the Canvas context and become
    * aware of everything the content does with Web Audio.
    *
    * See ContentObserver and WebAudioInstrumenter for more details.
    */
-  setup: method(function ({ reload }) {
+  setup: function ({ reload }) {
     // Used to track when something is happening with the web audio API
     // the first time, to ultimately fire `start-context` event
     this._firstNodeCreated = false;
 
     // Clear out stored nativeIDs on reload as we do not want to track
     // AudioNodes that are no longer on this document.
     this._nativeToActorID.clear();
 
@@ -564,20 +466,17 @@ var WebAudioActor = exports.WebAudioActo
       storeCalls: false
     });
     // Bind to `window-ready` so we can reenable recording on the
     // call watcher
     on(this.tabActor, "window-ready", this._onGlobalCreated);
     // Bind to the `window-destroyed` event so we can unbind events between
     // the global destruction and the `finalize` cleanup method on the actor.
     on(this.tabActor, "window-destroyed", this._onGlobalDestroyed);
-  }, {
-    request: { reload: Option(0, "boolean") },
-    oneway: true
-  }),
+  },
 
   /**
    * Invoked whenever an instrumented function is called, like an AudioContext
    * method or an AudioNode method.
    */
   _onContentFunctionCall: function (functionCall) {
     let { name } = functionCall.details;
 
@@ -650,77 +549,30 @@ var WebAudioActor = exports.WebAudioActo
     });
   },
 
   /**
    * Stops listening for document global changes and puts this actor
    * to hibernation. This method is called automatically just before the
    * actor is destroyed.
    */
-  finalize: method(function () {
+  finalize: function () {
     if (!this._initialized) {
       return;
     }
     this._initialized = false;
     systemOff("webaudio-node-demise", this._onDestroyNode);
 
     off(this.tabActor, "window-destroyed", this._onGlobalDestroyed);
     off(this.tabActor, "window-ready", this._onGlobalCreated);
     this.tabActor = null;
     this._nativeToActorID = null;
     this._callWatcher.eraseRecording();
     this._callWatcher.finalize();
     this._callWatcher = null;
-  }, {
-    oneway: true
-  }),
-
-  /**
-   * Events emitted by this actor.
-   */
-  events: {
-    "start-context": {
-      type: "startContext"
-    },
-    "connect-node": {
-      type: "connectNode",
-      source: Option(0, "audionode"),
-      dest: Option(0, "audionode")
-    },
-    "disconnect-node": {
-      type: "disconnectNode",
-      source: Arg(0, "audionode")
-    },
-    "connect-param": {
-      type: "connectParam",
-      source: Option(0, "audionode"),
-      dest: Option(0, "audionode"),
-      param: Option(0, "string")
-    },
-    "change-param": {
-      type: "changeParam",
-      source: Option(0, "audionode"),
-      param: Option(0, "string"),
-      value: Option(0, "string")
-    },
-    "create-node": {
-      type: "createNode",
-      source: Arg(0, "audionode")
-    },
-    "destroy-node": {
-      type: "destroyNode",
-      source: Arg(0, "audionode")
-    },
-    "automation-event": {
-      type: "automationEvent",
-      node: Option(0, "audionode"),
-      paramName: Option(0, "string"),
-      eventName: Option(0, "string"),
-      args: Option(0, "json")
-    }
   },
 
   /**
    * Helper for constructing an AudioNodeActor, assigning to
    * internal weak map, and tracking via `manage` so it is assigned
    * an `actorID`.
    */
   _constructAudioNode: function (node) {
@@ -880,45 +732,16 @@ var WebAudioActor = exports.WebAudioActo
     if (this._nativeToActorID) {
       this._nativeToActorID.clear();
     }
     systemOff("webaudio-node-demise", this._onDestroyNode);
   }
 });
 
 /**
- * The corresponding Front object for the WebAudioActor.
- */
-var WebAudioFront = exports.WebAudioFront = protocol.FrontClass(WebAudioActor, {
-  initialize: function (client, { webaudioActor }) {
-    protocol.Front.prototype.initialize.call(this, client, { actor: webaudioActor });
-    this.manage(this);
-  },
-
-  /**
-   * If connecting to older geckos (<Fx43), where audio node actor's do not
-   * contain `type`, `source` and `bypassable` properties, fetch
-   * them manually here.
-   */
-  _onCreateNode: preEvent("create-node", function (audionode) {
-    if (!audionode.type) {
-      return audionode.getType().then(type => {
-        audionode.type = type;
-        audionode.source = !!AUDIO_NODE_DEFINITION[type].source;
-        audionode.bypassable = !AUDIO_NODE_DEFINITION[type].unbypassable;
-      });
-    }
-  }),
-});
-
-WebAudioFront.AUTOMATION_METHODS = new Set(AUTOMATION_METHODS);
-WebAudioFront.NODE_CREATION_METHODS = new Set(NODE_CREATION_METHODS);
-WebAudioFront.NODE_ROUTING_METHODS = new Set(NODE_ROUTING_METHODS);
-
-/**
  * Determines whether or not property is an AudioParam.
  *
  * @param AudioNode node
  *        An AudioNode.
  * @param String prop
  *        Property of `node` to evaluate to see if it's an AudioParam.
  * @return Boolean
  */
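
Every method in the hunks above follows the same migration: the `method()` wrapper and its inline request/response description are dropped from the actor, and the description moves into the shared spec. An illustrative recap using `bypass` (fragments of the two modules, not complete files):

// Before this patch the actor declared packet types inline, e.g.:
//
//   bypass: method(function (enable) { /* ... */ }, {
//     request: { enable: Arg(0, "boolean") },
//     response: { bypassed: RetVal("boolean") }
//   }),
//
// After this patch the two halves live in separate modules:

const { Arg, RetVal } = require("devtools/shared/protocol");

// devtools/server/actors/webaudio.js keeps only the implementation
// (fragment of the ActorClassWithSpec body above):
const actorMethods = {
  bypass: function (enable) {
    // ...toggles node.passThrough and returns this.isBypassed(), as above...
  }
};

// devtools/shared/specs/webaudio.js carries the request/response description
// (fragment of audionodeSpec's `methods` map, shown further below):
const specMethods = {
  bypass: {
    request: { enable: Arg(0, "boolean") },
    response: { bypassed: RetVal("boolean") }
  }
};
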
--- a/devtools/shared/fronts/moz.build
+++ b/devtools/shared/fronts/moz.build
@@ -9,10 +9,11 @@ DevToolsModules(
     'addons.js',
     'animation.js',
     'call-watcher.js',
     'css-properties.js',
     'highlighters.js',
     'inspector.js',
     'storage.js',
     'styles.js',
-    'stylesheets.js'
+    'stylesheets.js',
+    'webaudio.js'
 )
new file mode 100644
--- /dev/null
+++ b/devtools/shared/fronts/webaudio.js
@@ -0,0 +1,83 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+"use strict";
+
+const {
+  audionodeSpec,
+  webAudioSpec,
+  AUTOMATION_METHODS,
+  NODE_CREATION_METHODS,
+  NODE_ROUTING_METHODS,
+} = require("devtools/shared/specs/webaudio");
+const protocol = require("devtools/shared/protocol");
+const AUDIO_NODE_DEFINITION = require("devtools/server/actors/utils/audionodes.json");
+
+/**
+ * The corresponding Front object for the AudioNodeActor.
+ *
+ * @attribute {String} type
+ *            The type of audio node, like "OscillatorNode", "MediaElementAudioSourceNode"
+ * @attribute {Boolean} source
+ *            Boolean indicating if the node is a source node, like BufferSourceNode,
+ *            MediaElementAudioSourceNode, OscillatorNode, etc.
+ * @attribute {Boolean} bypassable
+ *            Boolean indicating if the audio node is bypassable (splitter,
+ *            merger and destination nodes, for example, are not)
+ */
+const AudioNodeFront = protocol.FrontClassWithSpec(audionodeSpec, {
+  form: function (form, detail) {
+    if (detail === "actorid") {
+      this.actorID = form;
+      return;
+    }
+
+    this.actorID = form.actor;
+    this.type = form.type;
+    this.source = form.source;
+    this.bypassable = form.bypassable;
+  },
+
+  initialize: function (client, form) {
+    protocol.Front.prototype.initialize.call(this, client, form);
+    // if we were manually passed a form, this was created manually and
+    // needs to own itself for now.
+    if (form) {
+      this.manage(this);
+    }
+  }
+});
+
+exports.AudioNodeFront = AudioNodeFront;
+
+/**
+ * The corresponding Front object for the WebAudioActor.
+ */
+const WebAudioFront = protocol.FrontClassWithSpec(webAudioSpec, {
+  initialize: function (client, { webaudioActor }) {
+    protocol.Front.prototype.initialize.call(this, client, { actor: webaudioActor });
+    this.manage(this);
+  },
+
+  /**
+   * If connecting to older geckos (<Fx43), where audio node actor's do not
+   * contain `type`, `source` and `bypassable` properties, fetch
+   * them manually here.
+   */
+  _onCreateNode: protocol.preEvent("create-node", function (audionode) {
+    if (!audionode.type) {
+      return audionode.getType().then(type => {
+        audionode.type = type;
+        audionode.source = !!AUDIO_NODE_DEFINITION[type].source;
+        audionode.bypassable = !AUDIO_NODE_DEFINITION[type].unbypassable;
+      });
+    }
+    return null;
+  }),
+});
+
+WebAudioFront.AUTOMATION_METHODS = new Set(AUTOMATION_METHODS);
+WebAudioFront.NODE_CREATION_METHODS = new Set(NODE_CREATION_METHODS);
+WebAudioFront.NODE_ROUTING_METHODS = new Set(NODE_ROUTING_METHODS);
+
+exports.WebAudioFront = WebAudioFront;
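
The three method-name Sets re-exported on WebAudioFront preserve the old API surface for existing callers. A hypothetical sketch of how client code might use them to classify an intercepted Web Audio call; the `classifyCall` helper is illustrative and not part of the patch:

const { WebAudioFront } = require("devtools/shared/fronts/webaudio");

// Hypothetical helper: classify a recorded Web Audio call by method name
// using the Sets exposed on WebAudioFront.
function classifyCall(methodName) {
  if (WebAudioFront.NODE_CREATION_METHODS.has(methodName)) {
    return "node creation";
  }
  if (WebAudioFront.NODE_ROUTING_METHODS.has(methodName)) {
    return "node routing";
  }
  if (WebAudioFront.AUTOMATION_METHODS.has(methodName)) {
    return "param automation";
  }
  return "other";
}

classifyCall("createGain"); // "node creation"
classifyCall("connect");    // "node routing"
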
--- a/devtools/shared/specs/moz.build
+++ b/devtools/shared/specs/moz.build
@@ -12,10 +12,11 @@ DevToolsModules(
     'css-properties.js',
     'heap-snapshot-file.js',
     'highlighters.js',
     'inspector.js',
     'node.js',
     'storage.js',
     'styleeditor.js',
     'styles.js',
-    'stylesheets.js'
+    'stylesheets.js',
+    'webaudio.js'
 )
new file mode 100644
--- /dev/null
+++ b/devtools/shared/specs/webaudio.js
@@ -0,0 +1,163 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+"use strict";
+
+const {
+  Arg,
+  Option,
+  RetVal,
+  generateActorSpec,
+  types,
+} = require("devtools/shared/protocol");
+
+exports.NODE_CREATION_METHODS = [
+  "createBufferSource", "createMediaElementSource", "createMediaStreamSource",
+  "createMediaStreamDestination", "createScriptProcessor", "createAnalyser",
+  "createGain", "createDelay", "createBiquadFilter", "createWaveShaper",
+  "createPanner", "createConvolver", "createChannelSplitter", "createChannelMerger",
+  "createDynamicsCompressor", "createOscillator", "createStereoPanner"
+];
+
+exports.AUTOMATION_METHODS = [
+  "setValueAtTime", "linearRampToValueAtTime", "exponentialRampToValueAtTime",
+  "setTargetAtTime", "setValueCurveAtTime", "cancelScheduledValues"
+];
+
+exports.NODE_ROUTING_METHODS = [
+  "connect", "disconnect"
+];
+
+types.addActorType("audionode");
+const audionodeSpec = generateActorSpec({
+  typeName: "audionode",
+
+  methods: {
+    getType: { response: { type: RetVal("string") }},
+    isBypassed: {
+      response: { bypassed: RetVal("boolean") }
+    },
+    bypass: {
+      request: { enable: Arg(0, "boolean") },
+      response: { bypassed: RetVal("boolean") }
+    },
+    setParam: {
+      request: {
+        param: Arg(0, "string"),
+        value: Arg(1, "nullable:primitive")
+      },
+      response: { error: RetVal("nullable:json") }
+    },
+    getParam: {
+      request: {
+        param: Arg(0, "string")
+      },
+      response: { text: RetVal("nullable:primitive") }
+    },
+    getParamFlags: {
+      request: { param: Arg(0, "string") },
+      response: { flags: RetVal("nullable:primitive") }
+    },
+    getParams: {
+      response: { params: RetVal("json") }
+    },
+    connectParam: {
+      request: {
+        destActor: Arg(0, "audionode"),
+        paramName: Arg(1, "string"),
+        output: Arg(2, "nullable:number")
+      },
+      response: { error: RetVal("nullable:json") }
+    },
+    connectNode: {
+      request: {
+        destActor: Arg(0, "audionode"),
+        output: Arg(1, "nullable:number"),
+        input: Arg(2, "nullable:number")
+      },
+      response: { error: RetVal("nullable:json") }
+    },
+    disconnect: {
+      request: { output: Arg(0, "nullable:number") },
+      response: { error: RetVal("nullable:json") }
+    },
+    getAutomationData: {
+      request: { paramName: Arg(0, "string") },
+      response: { values: RetVal("nullable:json") }
+    },
+    addAutomationEvent: {
+      request: {
+        paramName: Arg(0, "string"),
+        eventName: Arg(1, "string"),
+        args: Arg(2, "nullable:json")
+      },
+      response: { error: RetVal("nullable:json") }
+    },
+  }
+});
+
+exports.audionodeSpec = audionodeSpec;
+
+const webAudioSpec = generateActorSpec({
+  typeName: "webaudio",
+
+  /**
+   * Events emitted by this actor.
+   */
+  events: {
+    "start-context": {
+      type: "startContext"
+    },
+    "connect-node": {
+      type: "connectNode",
+      source: Option(0, "audionode"),
+      dest: Option(0, "audionode")
+    },
+    "disconnect-node": {
+      type: "disconnectNode",
+      source: Arg(0, "audionode")
+    },
+    "connect-param": {
+      type: "connectParam",
+      source: Option(0, "audionode"),
+      dest: Option(0, "audionode"),
+      param: Option(0, "string")
+    },
+    "change-param": {
+      type: "changeParam",
+      source: Option(0, "audionode"),
+      param: Option(0, "string"),
+      value: Option(0, "string")
+    },
+    "create-node": {
+      type: "createNode",
+      source: Arg(0, "audionode")
+    },
+    "destroy-node": {
+      type: "destroyNode",
+      source: Arg(0, "audionode")
+    },
+    "automation-event": {
+      type: "automationEvent",
+      node: Option(0, "audionode"),
+      paramName: Option(0, "string"),
+      eventName: Option(0, "string"),
+      args: Option(0, "json")
+    }
+  },
+
+  methods: {
+    getDefinition: {
+      response: { definition: RetVal("json") }
+    },
+    setup: {
+      request: { reload: Option(0, "boolean") },
+      oneway: true
+    },
+    finalize: {
+      oneway: true
+    }
+  }
+});
+
+exports.webAudioSpec = webAudioSpec;
new file mode 100644
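
With the front now living in devtools/shared/fronts/webaudio.js, client code builds it from the debugger client and the target form and never touches the actor module. A hedged sketch of that flow, assuming a devtools target that has already been made remote (as in browser_target_support.js above); the wiring is illustrative, not part of the patch:

const { WebAudioFront } = require("devtools/shared/fronts/webaudio");

// Assumption: `target` has been made remote, so its form carries the
// webaudioActor ID that WebAudioFront's initialize() destructures.
function attachWebAudioFront(target) {
  const front = new WebAudioFront(target.client, target.form);

  // Events declared in webAudioSpec arrive on the front; "create-node" hands
  // over an AudioNodeFront whose type/source/bypassable come from its form.
  front.on("create-node", audioNodeFront => {
    console.log("new audio node:", audioNodeFront.type);
  });

  // `setup` is oneway per the spec; `getDefinition` resolves with the
  // audionodes.json definition object.
  front.setup({ reload: false });
  return front.getDefinition();
}
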