Bug 943461. Part 11: Don't tear down an OfflineAudioContext until its current time has actually reached its end, to ensure that all relevant stream state changes have been forwarded to the main thread. r=padenot
author: Robert O'Callahan <robert@ocallahan.org>
Mon, 09 Dec 2013 18:08:02 +1300
changeset 162328 939ac6298d2a90e0bcafec32e83aa589fd8e0199
parent 162327 bed75e8c44b1b5fd21f8b1257ffd486b7dea75b8
child 162329 c735d3475955280cfd887d45374d6430acc628c8
push id: unknown
push user: unknown
push date: unknown
reviewers: padenot
bugs: 943461
milestone: 29.0a1
content/media/MediaStreamGraph.cpp
content/media/MediaStreamGraph.h
content/media/MediaStreamGraphImpl.h
content/media/webaudio/AudioDestinationNode.cpp
content/media/webaudio/AudioDestinationNode.h
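
The heart of the patch is the graph thread's new shutdown test: rather than counting processed non-realtime ticks and then parking the thread, the graph records an explicit end time (mEndTime) and commits to its final update only once mCurrentTime has reached that time and every finished stream's state change has been forwarded to the main thread. A minimal standalone sketch of that condition, with simplified types (the Graph struct below is a hypothetical stand-in for MediaStreamGraphImpl, not the real class):

    #include <cstdint>
    #include <vector>

    typedef int64_t GraphTime;

    struct Stream {
      bool mFinished;          // the stream has produced all of its data
      bool mNotifiedFinished;  // playout reached the end; main thread was told
    };

    struct Graph {
      std::vector<Stream> mStreams;
      GraphTime mCurrentTime;
      GraphTime mEndTime;      // replaces the old mNonRealtimeTicksToProcess counter
      bool mForceShutDown;
      bool mIsEmpty;           // stand-in for IsEmpty()
      bool mMessageQueueEmpty; // stand-in for mMessageQueue.IsEmpty()

      bool AllFinishedStreamsNotified() const {
        for (const Stream& s : mStreams) {
          if (s.mFinished && !s.mNotifiedFinished) {
            return false;  // a finish has not been forwarded yet; keep iterating
          }
        }
        return true;
      }

      // Mirrors the new finalUpdate expression in RunThread(): tear down only
      // when forced, or when time has run out *and* all finish notifications
      // are on their way to the main thread, or when there is nothing left.
      bool ShouldDoFinalUpdate() const {
        return mForceShutDown ||
               (mCurrentTime >= mEndTime && AllFinishedStreamsNotified()) ||
               (mIsEmpty && mMessageQueueEmpty);
      }
    };
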
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -1096,16 +1096,28 @@ MediaStreamGraphImpl::ProduceDataForStre
         ps->ProduceOutput(t, next, (next == aTo) ? ProcessedMediaStream::ALLOW_FINISH : 0);
       }
     }
     t = next;
   }
   NS_ASSERTION(t == aTo, "Something went wrong with rounding to block boundaries");
 }
 
+bool
+MediaStreamGraphImpl::AllFinishedStreamsNotified()
+{
+  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
+    MediaStream* s = mStreams[i];
+    if (s->mFinished && !s->mNotifiedFinished) {
+      return false;
+    }
+  }
+  return true;
+}
+
 void
 MediaStreamGraphImpl::RunThread()
 {
   nsTArray<MessageBlock> messageQueue;
   {
     MonitorAutoLock lock(mMonitor);
     messageQueue.SwapElements(mMessageQueue);
   }
@@ -1218,37 +1230,27 @@ MediaStreamGraphImpl::RunThread()
       if (is) {
         UpdateBufferSufficiencyState(is);
       }
       GraphTime end;
       if (!stream->mBlocked.GetAt(mCurrentTime, &end) || end < GRAPH_TIME_MAX) {
         allBlockedForever = false;
       }
     }
-    if (!mRealtime) {
-      // Terminate processing if we've produced enough non-realtime ticks.
-      if (!mForceShutDown && ticksProcessed >= mNonRealtimeTicksToProcess) {
-        // Wait indefinitely when we've processed enough non-realtime ticks.
-        // We'll be woken up when the graph shuts down.
-        MonitorAutoLock lock(mMonitor);
-        PrepareUpdatesToMainThreadState(true);
-        mWaitState = WAITSTATE_WAITING_INDEFINITELY;
-        mMonitor.Wait(PR_INTERVAL_NO_TIMEOUT);
-      }
-    }
     if (ensureNextIteration || !allBlockedForever || audioStreamsActive > 0) {
       EnsureNextIteration();
     }
 
     // Send updates to the main thread and wait for the next control loop
     // iteration.
     {
       MonitorAutoLock lock(mMonitor);
-      bool finalUpdate = (mForceShutDown ||
-                          (IsEmpty() && mMessageQueue.IsEmpty()));
+      bool finalUpdate = mForceShutDown ||
+        (mCurrentTime >= mEndTime && AllFinishedStreamsNotified()) ||
+        (IsEmpty() && mMessageQueue.IsEmpty());
       PrepareUpdatesToMainThreadState(finalUpdate);
       if (finalUpdate) {
         // Enter shutdown mode. The stable-state handler will detect this
         // and complete shutdown. Destroy any streams immediately.
         STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this));
         // Commit to shutting down this graph object.
         mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
         // No need to Destroy streams here. The main-thread owner of each
@@ -2393,17 +2395,17 @@ static const int32_t INITIAL_CURRENT_TIM
 MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime)
   : mCurrentTime(INITIAL_CURRENT_TIME)
   , mStateComputedTime(INITIAL_CURRENT_TIME)
   , mProcessingGraphUpdateIndex(0)
   , mPortCount(0)
   , mMonitor("MediaStreamGraphImpl")
   , mLifecycleState(LIFECYCLE_THREAD_NOT_STARTED)
   , mWaitState(WAITSTATE_RUNNING)
-  , mNonRealtimeTicksToProcess(0)
+  , mEndTime(GRAPH_TIME_MAX)
   , mNeedAnotherIteration(false)
   , mForceShutDown(false)
   , mPostedRunInStableStateEvent(false)
   , mNonRealtimeIsRunning(false)
   , mDetectedNotRunning(false)
   , mPostedRunInStableState(false)
   , mRealtime(aRealtime)
   , mNonRealtimeProcessing(false)
@@ -2472,17 +2474,17 @@ MediaStreamGraph::DestroyNonRealtimeInst
   MOZ_ASSERT(aGraph->IsNonRealtime(), "Should not destroy the global graph here");
 
   MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
   if (graph->mForceShutDown)
     return; // already done
 
   if (!graph->mNonRealtimeProcessing) {
     // Start the graph, but don't produce anything
-    graph->StartNonRealtimeProcessing(0);
+    graph->StartNonRealtimeProcessing(1, 0);
   }
   graph->ForceShutDown();
 }
 
 SourceMediaStream*
 MediaStreamGraph::CreateSourceStream(DOMMediaStream* aWrapper)
 {
   SourceMediaStream* stream = new SourceMediaStream(aWrapper);
@@ -2543,26 +2545,26 @@ MediaStreamGraph::CreateAudioNodeStream(
 
 bool
 MediaStreamGraph::IsNonRealtime() const
 {
   return this != gGraph;
 }
 
 void
-MediaStreamGraph::StartNonRealtimeProcessing(uint32_t aTicksToProcess)
+MediaStreamGraph::StartNonRealtimeProcessing(TrackRate aRate, uint32_t aTicksToProcess)
 {
   NS_ASSERTION(NS_IsMainThread(), "main thread only");
 
   MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
   NS_ASSERTION(!graph->mRealtime, "non-realtime only");
 
   if (graph->mNonRealtimeProcessing)
     return;
-  graph->mNonRealtimeTicksToProcess = aTicksToProcess;
+  graph->mEndTime = graph->mCurrentTime + TicksToTimeRoundUp(aRate, aTicksToProcess);
   graph->mNonRealtimeProcessing = true;
   graph->EnsureRunInStableState();
 }
 
 void
 ProcessedMediaStream::AddInput(MediaInputPort* aPort)
 {
   mInputs.AppendElement(aPort);
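
For reference, mEndTime is derived from the caller-supplied rate and tick count via TicksToTimeRoundUp. A hedged sketch of that conversion, assuming the graph's time unit is a fixed-point seconds value with MEDIA_TIME_FRAC_BITS fractional bits (see MediaSegment.h for the real definition; the constant's value here is an assumption):

    #include <cstdint>

    typedef int32_t TrackRate;  // ticks (sample frames) per second; must be > 0
    typedef int64_t MediaTime;  // fixed-point seconds
    const int64_t MEDIA_TIME_FRAC_BITS = 20;  // assumed; check MediaSegment.h

    MediaTime TicksToTimeRoundUp(TrackRate aRate, int64_t aTicks) {
      // Round up so the graph never stops before the last requested tick.
      return ((aTicks << MEDIA_TIME_FRAC_BITS) + aRate - 1) / aRate;
    }

With this, StartNonRealtimeProcessing(aRate, aTicks) sets mEndTime = mCurrentTime + TicksToTimeRoundUp(aRate, aTicks), so an OfflineAudioContext rendering 44100 frames at 44100 Hz advances exactly one second of graph time before the final update, and the dummy rate of 1 with 0 ticks in DestroyNonRealtimeInstance simply sets mEndTime to the current time so the graph shuts down without producing anything.
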
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -1051,17 +1051,17 @@ public:
   AudioNodeExternalInputStream*
   CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine,
                                      TrackRate aSampleRate = 0);
 
   bool IsNonRealtime() const;
   /**
    * Start processing non-realtime for a specific number of ticks.
    */
-  void StartNonRealtimeProcessing(uint32_t aTicksToProcess);
+  void StartNonRealtimeProcessing(TrackRate aRate, uint32_t aTicksToProcess);
 
   /**
    * Media graph thread only.
    * Dispatches a runnable that will run on the main thread after all
    * main-thread stream state has been next updated.
    * Should only be called during MediaStreamListener callbacks or during
    * ProcessedMediaStream::ProduceOutput().
    */
--- a/content/media/MediaStreamGraphImpl.h
+++ b/content/media/MediaStreamGraphImpl.h
@@ -182,16 +182,21 @@ public:
    */
   void EnsureStableStateEventPosted();
   /**
    * Generate messages to the main thread to update it for all state changes.
    * mMonitor must be held.
    */
   void PrepareUpdatesToMainThreadState(bool aFinalUpdate);
   /**
+   * Returns false if there is any stream that has finished producing data but
+   * has not yet finished playing out.
+   */
+  bool AllFinishedStreamsNotified();
+  /**
    * If we are rendering in non-realtime mode, we don't want to send messages to
    * the main thread at each iteration for performance reasons. We instead
   * notify the main thread at the same rate as we would in realtime mode.
    */
   bool ShouldUpdateMainThread();
   // The following methods are the various stages of RunThread processing.
   /**
    * Compute a new current time for the graph and advance all on-graph-thread
@@ -495,19 +500,19 @@ public:
     // RunThread() is paused indefinitely waiting for something to change
     WAITSTATE_WAITING_INDEFINITELY,
     // Something has signaled RunThread() to wake up immediately,
     // but it hasn't done so yet
     WAITSTATE_WAKING_UP
   };
   WaitState mWaitState;
   /**
-   * How many non-realtime ticks the graph should process.
+   * The graph should stop processing at or after this time.
    */
-  uint32_t mNonRealtimeTicksToProcess;
+  GraphTime mEndTime;
   /**
    * True when another iteration of the control loop is required.
    */
   bool mNeedAnotherIteration;
   /**
    * True when we need to do a forced shutdown during application shutdown.
    */
   bool mForceShutDown;
--- a/content/media/webaudio/AudioDestinationNode.cpp
+++ b/content/media/webaudio/AudioDestinationNode.cpp
@@ -59,16 +59,22 @@ public:
     // will not go anywhere.
     *aOutput = aInput;
 
     // Handle the case of allocation failure in the input buffer
     if (mInputChannels.IsEmpty()) {
       return;
     }
 
+    if (mWriteIndex >= mLength) {
+      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
+      // Don't record any more.
+      return;
+    }
+
     // Record our input buffer
     MOZ_ASSERT(mWriteIndex < mLength, "How did this happen?");
     const uint32_t duration = std::min(WEBAUDIO_BLOCK_SIZE, mLength - mWriteIndex);
     const uint32_t commonChannelCount = std::min(mInputChannels.Length(),
                                                  aInput.mChannelData.Length());
     // First, copy as many channels in the input as we have
     for (uint32_t i = 0; i < commonChannelCount; ++i) {
       if (aInput.IsNull()) {
@@ -91,93 +97,54 @@ public:
       }
     }
     // Then, silence all of the remaining channels
     for (uint32_t i = commonChannelCount; i < mInputChannels.Length(); ++i) {
       PodZero(mInputChannels[i] + mWriteIndex, duration);
     }
     mWriteIndex += duration;
 
-    if (mWriteIndex == mLength) {
-      SendBufferToMainThread(aStream);
+    if (mWriteIndex >= mLength) {
+      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
+      // Go to finished state. When the graph's current time eventually reaches
+      // the end of the stream, then the main thread will be notified and we'll
+      // shut down the AudioContext.
       *aFinished = true;
     }
   }
 
-  void SendBufferToMainThread(AudioNodeStream* aStream)
+  void FireOfflineCompletionEvent(AudioDestinationNode* aNode)
   {
-    class Command : public nsRunnable
-    {
-    public:
-      Command(AudioNodeStream* aStream,
-              InputChannels& aInputChannels,
-              uint32_t aLength,
-              float aSampleRate)
-        : mStream(aStream)
-        , mLength(aLength)
-        , mSampleRate(aSampleRate)
-      {
-        mInputChannels.SwapElements(aInputChannels);
-      }
+    AudioContext* context = aNode->Context();
+    context->Shutdown();
+    // Shutdown drops self reference, but the context is still referenced by aNode,
+    // which is strongly referenced by the runnable that called
+    // AudioDestinationNode::FireOfflineCompletionEvent.
 
-      NS_IMETHODIMP Run()
-      {
-        // If it's not safe to run scripts right now, schedule this to run later
-        if (!nsContentUtils::IsSafeToRunScript()) {
-          nsContentUtils::AddScriptRunner(this);
-          return NS_OK;
-        }
+    AutoPushJSContext cx(context->GetJSContext());
+    if (!cx) {
+      return;
+    }
+    JSAutoRequest ar(cx);
 
-        nsRefPtr<AudioContext> context;
-        {
-          MutexAutoLock lock(mStream->Engine()->NodeMutex());
-          AudioNode* node = mStream->Engine()->Node();
-          if (node) {
-            context = node->Context();
-            MOZ_ASSERT(context, "node hasn't kept context alive");
-          }
-        }
-        if (!context) {
-          return NS_OK;
-        }
-        context->Shutdown(); // drops self reference
-
-        AutoPushJSContext cx(context->GetJSContext());
-        if (cx) {
-          JSAutoRequest ar(cx);
+    // Create the input buffer
+    nsRefPtr<AudioBuffer> renderedBuffer = new AudioBuffer(context,
+                                                           mLength,
+                                                           mSampleRate);
+    if (!renderedBuffer->InitializeBuffers(mInputChannels.Length(), cx)) {
+      return;
+    }
+    for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
+      renderedBuffer->SetRawChannelContents(cx, i, mInputChannels[i]);
+    }
 
-          // Create the input buffer
-          nsRefPtr<AudioBuffer> renderedBuffer = new AudioBuffer(context,
-                                                                 mLength,
-                                                                 mSampleRate);
-          if (!renderedBuffer->InitializeBuffers(mInputChannels.Length(), cx)) {
-            return NS_OK;
-          }
-          for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
-            renderedBuffer->SetRawChannelContents(cx, i, mInputChannels[i]);
-          }
-
-          nsRefPtr<OfflineAudioCompletionEvent> event =
-              new OfflineAudioCompletionEvent(context, nullptr, nullptr);
-          event->InitEvent(renderedBuffer);
-          context->DispatchTrustedEvent(event);
-        }
-
-        return NS_OK;
-      }
-    private:
-      nsRefPtr<AudioNodeStream> mStream;
-      InputChannels mInputChannels;
-      uint32_t mLength;
-      float mSampleRate;
-    };
-
-    // Empty out the source array to make sure we don't attempt to collect
-    // more input data in the future.
-    NS_DispatchToMainThread(new Command(aStream, mInputChannels, mLength, mSampleRate));
+    nsRefPtr<OfflineAudioCompletionEvent> event =
+        new OfflineAudioCompletionEvent(context, nullptr, nullptr);
+    event->InitEvent(renderedBuffer);
+    context->DispatchTrustedEvent(event);
   }
 
 private:
   // The input to the destination node is recorded in the mInputChannels buffer.
   // When this buffer fills up with mLength frames, the buffered input is sent
   // to the main thread in order to dispatch OfflineAudioCompletionEvent.
   InputChannels mInputChannels;
   // An index representing the next offset in mInputChannels to be written to.
@@ -243,26 +210,29 @@ AudioDestinationNode::AudioDestinationNo
                                            uint32_t aLength,
                                            float aSampleRate)
   : AudioNode(aContext,
               aIsOffline ? aNumberOfChannels : 2,
               ChannelCountMode::Explicit,
               ChannelInterpretation::Speakers)
   , mFramesToProduce(aLength)
   , mAudioChannel(AudioChannel::Normal)
+  , mIsOffline(aIsOffline)
+  , mHasFinished(false)
 {
   MediaStreamGraph* graph = aIsOffline ?
                             MediaStreamGraph::CreateNonRealtimeInstance() :
                             MediaStreamGraph::GetInstance();
   AudioNodeEngine* engine = aIsOffline ?
                             new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                              aLength, aSampleRate) :
                             static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));
 
   mStream = graph->CreateAudioNodeStream(engine, MediaStreamGraph::EXTERNAL_STREAM);
+  mStream->AddMainThreadListener(this);
 
   if (!aIsOffline && UseAudioChannelService()) {
     nsCOMPtr<nsIDOMEventTarget> target = do_QueryInterface(GetOwner());
     if (target) {
       target->AddSystemEventListener(NS_LITERAL_STRING("visibilitychange"), this,
                                      /* useCapture = */ true,
                                      /* wantsUntrusted = */ false);
     }
@@ -283,23 +253,46 @@ AudioDestinationNode::DestroyMediaStream
 
     target->RemoveSystemEventListener(NS_LITERAL_STRING("visibilitychange"), this,
                                       /* useCapture = */ true);
   }
 
   if (!mStream)
     return;
 
+  mStream->RemoveMainThreadListener(this);
   MediaStreamGraph* graph = mStream->Graph();
   if (graph->IsNonRealtime()) {
     MediaStreamGraph::DestroyNonRealtimeInstance(graph);
   }
   AudioNode::DestroyMediaStream();
 }
 
+void
+AudioDestinationNode::NotifyMainThreadStateChanged()
+{
+  if (mStream->IsFinished() && !mHasFinished) {
+    mHasFinished = true;
+    if (mIsOffline) {
+      nsCOMPtr<nsIRunnable> runnable =
+        NS_NewRunnableMethod(this, &AudioDestinationNode::FireOfflineCompletionEvent);
+      NS_DispatchToCurrentThread(runnable);
+    }
+  }
+}
+
+void
+AudioDestinationNode::FireOfflineCompletionEvent()
+{
+  AudioNodeStream* stream = static_cast<AudioNodeStream*>(Stream());
+  OfflineDestinationNodeEngine* engine =
+    static_cast<OfflineDestinationNodeEngine*>(stream->Engine());
+  engine->FireOfflineCompletionEvent(this);
+}
+
 uint32_t
 AudioDestinationNode::MaxChannelCount() const
 {
   return Context()->MaxChannelCount();
 }
 
 void
 AudioDestinationNode::SetChannelCount(uint32_t aChannelCount, ErrorResult& aRv)
@@ -341,17 +334,17 @@ AudioDestinationNode::WrapObject(JSConte
 {
   return AudioDestinationNodeBinding::Wrap(aCx, aScope, this);
 }
 
 void
 AudioDestinationNode::StartRendering()
 {
   mOfflineRenderingRef.Take(this);
-  mStream->Graph()->StartNonRealtimeProcessing(mFramesToProduce);
+  mStream->Graph()->StartNonRealtimeProcessing(TrackRate(Context()->SampleRate()), mFramesToProduce);
 }
 
 void
 AudioDestinationNode::SetCanPlay(bool aCanPlay)
 {
   mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, aCanPlay);
 }
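
The AudioDestinationNode side replaces the graph-thread runnable that used to ship the rendered buffer to the main thread: the node now registers as a MainThreadMediaStreamListener, and once the forwarded stream state shows the stream finished, it dispatches an event that fires the offline completion event. A simplified sketch of that flow under hypothetical minimal types (the real code uses nsIRunnable, NS_DispatchToCurrentThread, and Gecko reference counting):

    #include <functional>
    #include <queue>
    #include <utility>

    // Stand-in for the main-thread event loop (hypothetical).
    struct MainThreadQueue {
      std::queue<std::function<void()>> q;
      void Dispatch(std::function<void()> f) { q.push(std::move(f)); }
    };

    struct DestinationNode {
      bool mIsOffline;
      bool mHasFinished;  // guards against firing the completion event twice
      MainThreadQueue* mMainThread;

      void FireOfflineCompletionEvent() {
        // In Gecko: shut down the AudioContext, build the rendered
        // AudioBuffer from the engine's recorded channels, and dispatch
        // the trusted OfflineAudioCompletionEvent.
      }

      // Called on the main thread whenever stream state has been forwarded.
      void NotifyMainThreadStateChanged(bool aStreamFinished) {
        if (aStreamFinished && !mHasFinished) {
          mHasFinished = true;
          if (mIsOffline) {
            // Defer to a separate event rather than firing inside the state
            // update itself, mirroring NS_DispatchToCurrentThread.
            mMainThread->Dispatch([this] { FireOfflineCompletionEvent(); });
          }
        }
      }
    };
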
 
--- a/content/media/webaudio/AudioDestinationNode.h
+++ b/content/media/webaudio/AudioDestinationNode.h
@@ -18,16 +18,17 @@ namespace mozilla {
 namespace dom {
 
 class AudioContext;
 
 class AudioDestinationNode : public AudioNode
                            , public nsIDOMEventListener
                            , public nsIAudioChannelAgentCallback
                            , public nsSupportsWeakReference
+                           , public MainThreadMediaStreamListener
 {
 public:
   // This node type knows what MediaStreamGraph to use based on
   // whether it's in offline mode.
   AudioDestinationNode(AudioContext* aContext,
                        bool aIsOffline,
                        uint32_t aNumberOfChannels = 0,
                        uint32_t aLength = 0,
@@ -61,28 +62,33 @@ public:
   NS_IMETHOD HandleEvent(nsIDOMEvent* aEvent);
 
   // nsIAudioChannelAgentCallback
   NS_IMETHOD CanPlayChanged(int32_t aCanPlay);
 
   AudioChannel MozAudioChannelType() const;
   void SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv);
 
+  virtual void NotifyMainThreadStateChanged() MOZ_OVERRIDE;
+  void FireOfflineCompletionEvent();
+
 private:
   bool CheckAudioChannelPermissions(AudioChannel aValue);
   void CreateAudioChannelAgent();
 
   void SetCanPlay(bool aCanPlay);
 
   SelfReference<AudioDestinationNode> mOfflineRenderingRef;
   uint32_t mFramesToProduce;
 
   nsCOMPtr<nsIAudioChannelAgent> mAudioChannelAgent;
 
   // Audio Channel Type.
   AudioChannel mAudioChannel;
+  bool mIsOffline;
+  bool mHasFinished;
 };
 
 }
 }
 
 #endif