bug 1391482 add a mechanism to pass an AudioChunk from node to engine r=padenot
author Karl Tomlinson <karlt+@karlt.net>
date Tue, 01 Aug 2017 20:04:56 +1200
changeset 377730 043d871b9b8173d4ba8b9412a1e400ae362a31ed
parent 377729 f8fc4ff76ed9d4ba404c138d303a5044fbc8ea9f
child 377731 4192bfee266cab007b7d9835ac3589bfaa00d5a5
push id 94338
push user kwierso@gmail.com
push date Thu, 31 Aug 2017 02:58:58 +0000
treeherder mozilla-inbound@9ca18987dabb
reviewers padenot
bugs 1391482
milestone 57.0a1
bug 1391482 add a mechanism to pass an AudioChunk from node to engine r=padenot

MozReview-Commit-ID: Akfy9xDKzXg
dom/media/webaudio/AudioNodeEngine.h
dom/media/webaudio/AudioNodeStream.cpp
dom/media/webaudio/AudioNodeStream.h
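
The new AudioNodeStream::SetBuffer(AudioChunk&&) overload added below lets a node hand an AudioChunk to its engine over the graph's control message queue without copying the channel data. A minimal sketch of a main-thread caller follows; SomeAudioNode, SendBufferToEngine and mStream are hypothetical names for illustration and are not part of this patch:

  // Hypothetical main-thread caller: move an AudioChunk into the control
  // message queue; the Message::Run() added in AudioNodeStream.cpp below
  // then moves it into the engine on the graph thread.
  void
  SomeAudioNode::SendBufferToEngine(AudioChunk&& aChunk)
  {
    MOZ_ASSERT(NS_IsMainThread());
    AudioNodeStream* stream = mStream;  // the stream created for this node
    MOZ_ASSERT(stream, "node must still have a stream");
    stream->SetBuffer(Move(aChunk));    // queues the ControlMessage shown below
  }
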
--- a/dom/media/webaudio/AudioNodeEngine.h
+++ b/dom/media/webaudio/AudioNodeEngine.h
@@ -293,16 +293,20 @@ public:
                                        const dom::ThreeDPoint& aValue)
   {
     NS_ERROR("Invalid SetThreeDPointParameter index");
   }
   virtual void SetBuffer(already_AddRefed<ThreadSharedFloatArrayBufferList> aBuffer)
   {
     NS_ERROR("SetBuffer called on engine that doesn't support it");
   }
+  virtual void SetBuffer(AudioChunk&& aBuffer)
+  {
+    NS_ERROR("SetBuffer called on engine that doesn't support it");
+  }
   // This consumes the contents of aData.  aData will be emptied after this returns.
   virtual void SetRawArrayData(nsTArray<float>& aData)
   {
     NS_ERROR("SetRawArrayData called on an engine that doesn't support it");
   }
 
   /**
    * Produce the next block of audio samples, given input samples aInput
--- a/dom/media/webaudio/AudioNodeStream.cpp
+++ b/dom/media/webaudio/AudioNodeStream.cpp
@@ -270,16 +270,36 @@ AudioNodeStream::SetBuffer(already_AddRe
     }
     RefPtr<ThreadSharedFloatArrayBufferList> mBuffer;
   };
 
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aBuffer));
 }
 
 void
+AudioNodeStream::SetBuffer(AudioChunk&& aBuffer)
+{
+  class Message final : public ControlMessage
+  {
+  public:
+    Message(AudioNodeStream* aStream, AudioChunk&& aBuffer)
+      : ControlMessage(aStream), mBuffer(Move(aBuffer))
+    {}
+    void Run() override
+    {
+      static_cast<AudioNodeStream*>(mStream)->Engine()->
+        SetBuffer(Move(mBuffer));
+    }
+    AudioChunk mBuffer;
+  };
+
+  GraphImpl()->AppendMessage(MakeUnique<Message>(this, Move(aBuffer)));
+}
+
+void
 AudioNodeStream::SetRawArrayData(nsTArray<float>& aData)
 {
   class Message final : public ControlMessage
   {
   public:
     Message(AudioNodeStream* aStream,
             nsTArray<float>& aData)
       : ControlMessage(aStream)
--- a/dom/media/webaudio/AudioNodeStream.h
+++ b/dom/media/webaudio/AudioNodeStream.h
@@ -87,16 +87,17 @@ public:
    * This time is converted to a time relative to this stream when it's set.
    */
   void SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext,
                               double aStreamTime);
   void SetDoubleParameter(uint32_t aIndex, double aValue);
   void SetInt32Parameter(uint32_t aIndex, int32_t aValue);
   void SetThreeDPointParameter(uint32_t aIndex, const dom::ThreeDPoint& aValue);
   void SetBuffer(already_AddRefed<ThreadSharedFloatArrayBufferList>&& aBuffer);
+  void SetBuffer(AudioChunk&& aBuffer);
   // This sends a single event to the timeline on the MSG thread side.
   void SendTimelineEvent(uint32_t aIndex, const dom::AudioTimelineEvent& aEvent);
   // This consumes the contents of aData.  aData will be emptied after this returns.
   void SetRawArrayData(nsTArray<float>& aData);
   void SetChannelMixingParameters(uint32_t aNumberOfChannels,
                                   ChannelCountMode aChannelCountMoe,
                                   ChannelInterpretation aChannelInterpretation);
   void SetPassThrough(bool aPassThrough);
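
For completeness, a sketch of how an engine could accept the chunk delivered by this mechanism; MyNodeEngine is a hypothetical subclass, not part of this patch. The default AudioNodeEngine::SetBuffer(AudioChunk&&) above only reports an error, so only engines that actually consume an AudioChunk need to override it:

  // Hypothetical engine override: take ownership of the chunk on the graph
  // thread when the ControlMessage queued by AudioNodeStream::SetBuffer runs.
  class MyNodeEngine final : public AudioNodeEngine
  {
  public:
    explicit MyNodeEngine(dom::AudioNode* aNode)
      : AudioNodeEngine(aNode)
    {}
    void SetBuffer(AudioChunk&& aBuffer) override
    {
      mBuffer = Move(aBuffer);  // shares the chunk's channel data, no copy
    }
  private:
    AudioChunk mBuffer;
  };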