Bug 1266438 - Reset the AEC and other processing when audio devices change. r=jesup
author      Paul Adenot <paul@paul.cx>
date        Fri, 22 Apr 2016 16:24:17 +0200
changeset 318330 5663efa6c13cb03d23f4b840b433fb4e4498611d
parent 318329 969dc22def1815ad8924a312e7ff997cd6aa0e1c
child 318331 099d4414bd63296d586699b6e2b9bae068a4c4eb
push id     9480
push user   jlund@mozilla.com
push date   Mon, 25 Apr 2016 17:12:58 +0000
treeherder  mozilla-aurora@0d6a91c76a9e
reviewers   jesup
bugs        1266438
milestone   48.0a1
Bug 1266438 - Reset the AEC and other processing when audio devices change. r=jesup MozReview-Commit-ID: Jrr9E9ZSukv
dom/media/GraphDriver.cpp
dom/media/GraphDriver.h
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraph.h
dom/media/webrtc/MediaEngineDefault.h
dom/media/webrtc/MediaEngineWebRTC.h
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
--- a/dom/media/GraphDriver.cpp
+++ b/dom/media/GraphDriver.cpp
@@ -18,17 +18,17 @@
 #include <sys/sysctl.h>
 #endif
 
 extern mozilla::LazyLogModule gMediaStreamGraphLog;
 #define STREAM_LOG(type, msg) MOZ_LOG(gMediaStreamGraphLog, type, msg)
 
 // We don't use NSPR log here because we want this interleaved with adb logcat
 // on Android/B2G
-// #define ENABLE_LIFECYCLE_LOG
+#define ENABLE_LIFECYCLE_LOG
 #ifdef ENABLE_LIFECYCLE_LOG
 #ifdef ANDROID
 #include "android/log.h"
 #define LIFECYCLE_LOG(...)  __android_log_print(ANDROID_LOG_INFO, "Gecko - MSG" , __VA_ARGS__); printf(__VA_ARGS__);printf("\n");
 #else
 #define LIFECYCLE_LOG(...) printf(__VA_ARGS__);printf("\n");
 #endif
 #else
@@ -655,22 +655,19 @@ AudioCallbackDriver::Init()
       MonitorAutoLock lock(GraphImpl()->GetMonitor());
       SetNextDriver(new SystemClockDriver(GraphImpl()));
       NextDriver()->SetGraphTime(this, mIterationStart, mIterationEnd);
       mGraphImpl->SetCurrentDriver(NextDriver());
       NextDriver()->Start();
       return;
     }
   }
-#ifdef XP_MACOSX
-  // Currently, only mac cares about this
   bool aec;
   Unused << mGraphImpl->AudioTrackPresent(aec);
   SetMicrophoneActive(aec);
-#endif
 
   cubeb_stream_register_device_changed_callback(mAudioStream,
                                                 AudioCallbackDriver::DeviceChangedCallback_s);
 
   StartStream();
 
   STREAM_LOG(LogLevel::Debug, ("AudioCallbackDriver started."));
 }
@@ -1065,44 +1062,24 @@ void AudioCallbackDriver::PanOutputIfNee
       cubeb_stream_device_destroy(mAudioStream, out);
     }
   }
 #endif
 }
 
 void
 AudioCallbackDriver::DeviceChangedCallback() {
+  // Tell the audio engine that the device has changed; it may want to reset
+  // some of its processing state.
+  MonitorAutoLock mon(mGraphImpl->GetMonitor());
+  if (mAudioInput) {
+    mAudioInput->DeviceChanged();
+  }
 #ifdef XP_MACOSX
-  MonitorAutoLock mon(mGraphImpl->GetMonitor());
   PanOutputIfNeeded(mMicrophoneActive);
-  // On OSX, changing the output device causes the audio thread to no call the
-  // audio callback, so we're unable to process real-time input data, and this
-  // results in latency building up.
-  // We switch to a system driver until audio callbacks are called again, so we
-  // still pull from the input stream, so that everything works apart from the
-  // audio output.
-
-  // Don't bother doing the device switching dance if the graph is not RUNNING
-  // (starting up, shutting down), because we haven't started pulling from the
-  // SourceMediaStream.
-  if (!GraphImpl()->Running()) {
-    return;
-  }
-
-  if (mSelfReference) {
-    return;
-  }
-  STREAM_LOG(LogLevel::Error, ("Switching to SystemClockDriver during output switch"));
-  mSelfReference.Take(this);
-  mCallbackReceivedWhileSwitching = 0;
-  SetNextDriver(new SystemClockDriver(GraphImpl()));
-  RemoveCallback();
-  mNextDriver->SetGraphTime(this, mIterationStart, mIterationEnd);
-  mGraphImpl->SetCurrentDriver(mNextDriver);
-  mNextDriver->Start();
 #endif
 }
 
 void
 AudioCallbackDriver::SetMicrophoneActive(bool aActive)
 {
 #ifdef XP_MACOSX
   MonitorAutoLock mon(mGraphImpl->GetMonitor());
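For context, the cubeb_stream_register_device_changed_callback() call in Init() above points at a small static trampoline that already lives in GraphDriver.cpp. A sketch of that pattern, assuming cubeb hands the callback the user pointer that was passed to cubeb_stream_init(), i.e. the driver itself:

  /* static */ void
  AudioCallbackDriver::DeviceChangedCallback_s(void* aUser)
  {
    // cubeb gives us back the user pointer registered with the stream; that is
    // the AudioCallbackDriver, so forward to the instance method above.
    AudioCallbackDriver* driver = reinterpret_cast<AudioCallbackDriver*>(aUser);
    driver->DeviceChangedCallback();
  }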
--- a/dom/media/GraphDriver.h
+++ b/dom/media/GraphDriver.h
@@ -525,23 +525,24 @@ private:
   /* This is set during initialization, and can be read safely afterwards. */
   dom::AudioChannel mAudioChannel;
   /* Used to queue us to add the mixer callback on first run. */
   bool mAddedMixer;
 
   /* This is atomic and is set by the audio callback thread. It can be read by
    * any thread safely. */
   Atomic<bool> mInCallback;
-
-#ifdef XP_MACOSX
   /**
    * True if microphone is being used by this process. This is synchronized by
    * the graph's monitor. */
   bool mMicrophoneActive;
 
+
+#ifdef XP_MACOSX
+
   /* Implements the workaround for the osx audio stack when changing output
    * devices. See comments in .cpp */
   bool OSXDeviceSwitchingWorkaround();
   /* Self-reference that keep this driver alive when switching output audio
    * device and making the graph running temporarily off a SystemClockDriver.  */
   SelfReference<AudioCallbackDriver> mSelfReference;
   /* While switching devices, we keep track of the number of callbacks received,
    * since OSX seems to still call us _sometimes_. */
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -41,17 +41,17 @@ using namespace mozilla::dom;
 using namespace mozilla::gfx;
 using namespace mozilla::media;
 
 namespace mozilla {
 
 LazyLogModule gMediaStreamGraphLog("MediaStreamGraph");
 #define STREAM_LOG(type, msg) MOZ_LOG(gMediaStreamGraphLog, type, msg)
 
-// #define ENABLE_LIFECYCLE_LOG
+#define ENABLE_LIFECYCLE_LOG
 
 // We don't use NSPR log here because we want this interleaved with adb logcat
 // on Android/B2G
 #ifdef ENABLE_LIFECYCLE_LOG
 #  ifdef ANDROID
 #    include "android/log.h"
 #    define LIFECYCLE_LOG(...)  __android_log_print(ANDROID_LOG_INFO, "Gecko - MSG", ## __VA_ARGS__); printf(__VA_ARGS__);printf("\n");
 #  else
@@ -133,17 +133,17 @@ MediaStreamGraphImpl::RemoveStreamGraphT
 
   if (aStream->IsSuspended()) {
     mSuspendedStreams.RemoveElement(aStream);
   } else {
     mStreams.RemoveElement(aStream);
   }
 
   STREAM_LOG(LogLevel::Debug, ("Removed media stream %p from graph %p, count %lu",
-                               aStream, this, mStreams.Length()))
+                               aStream, this, mStreams.Length()));
   LIFECYCLE_LOG("Removed media stream %p from graph %p, count %lu",
                 aStream, this, mStreams.Length());
 
   NS_RELEASE(aStream); // probably destroying it
 }
 
 void
 MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -204,16 +204,21 @@ public:
                                 TrackRate aRate, uint32_t aChannels) = 0;
   /**
   * Input data from a microphone (or other audio source). This is not
   * guaranteed to arrive in chunks of any particular size.
    */
   virtual void NotifyInputData(MediaStreamGraph* aGraph,
                                const AudioDataValue* aBuffer, size_t aFrames,
                                TrackRate aRate, uint32_t aChannels) = 0;
+
+  /**
+   * Called when the underlying audio device has changed.
+   */
+  virtual void DeviceChanged() = 0;
 };
 
 class AudioDataListener : public AudioDataListenerInterface {
 protected:
   // Protected destructor, to discourage deletion outside of Release():
   virtual ~AudioDataListener() {}
 
 public:
--- a/dom/media/webrtc/MediaEngineDefault.h
+++ b/dom/media/webrtc/MediaEngineDefault.h
@@ -144,16 +144,18 @@ public:
   void NotifyOutputData(MediaStreamGraph* aGraph,
                         AudioDataValue* aBuffer, size_t aFrames,
                         TrackRate aRate, uint32_t aChannels) override
   {}
   void NotifyInputData(MediaStreamGraph* aGraph,
                        const AudioDataValue* aBuffer, size_t aFrames,
                        TrackRate aRate, uint32_t aChannels) override
   {}
+  void DeviceChanged() override
+  {}
   bool IsFake() override {
     return true;
   }
 
   dom::MediaSourceEnum GetMediaSource() const override {
     return dom::MediaSourceEnum::Microphone;
   }
 
--- a/dom/media/webrtc/MediaEngineWebRTC.h
+++ b/dom/media/webrtc/MediaEngineWebRTC.h
@@ -98,16 +98,18 @@ public:
                    const MediaEnginePrefs &aPrefs,
                    const nsString& aDeviceId) override;
   void SetDirectListeners(bool aDirect) override
   {}
   void NotifyOutputData(MediaStreamGraph* aGraph,
                         AudioDataValue* aBuffer, size_t aFrames,
                         TrackRate aRate, uint32_t aChannels) override
   {}
+  void DeviceChanged() override
+  {}
   void NotifyInputData(MediaStreamGraph* aGraph,
                        const AudioDataValue* aBuffer, size_t aFrames,
                        TrackRate aRate, uint32_t aChannels) override
   {}
   void NotifyPull(MediaStreamGraph* aGraph,
                   SourceMediaStream* aSource,
                   TrackID aID,
                   StreamTime aDesiredTime,
@@ -386,16 +388,22 @@ public:
                                const AudioDataValue* aBuffer, size_t aFrames,
                                TrackRate aRate, uint32_t aChannels) override
   {
     MutexAutoLock lock(mMutex);
     if (mAudioSource) {
       mAudioSource->NotifyInputData(aGraph, aBuffer, aFrames, aRate, aChannels);
     }
   }
+  void DeviceChanged() override
+  {
+    MutexAutoLock lock(mMutex);
+    if (mAudioSource) {
+      mAudioSource->DeviceChanged();
+    }
+  }
 
   void Shutdown()
   {
     MutexAutoLock lock(mMutex);
     mAudioSource = nullptr;
   }
 
 private:
@@ -466,16 +474,18 @@ public:
   // AudioDataListenerInterface methods
   void NotifyOutputData(MediaStreamGraph* aGraph,
                         AudioDataValue* aBuffer, size_t aFrames,
                         TrackRate aRate, uint32_t aChannels) override;
   void NotifyInputData(MediaStreamGraph* aGraph,
                        const AudioDataValue* aBuffer, size_t aFrames,
                        TrackRate aRate, uint32_t aChannels) override;
 
+  void DeviceChanged() override;
+
   bool IsFake() override {
     return false;
   }
 
   dom::MediaSourceEnum GetMediaSource() const override {
     return dom::MediaSourceEnum::Microphone;
   }
 
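Taken together with the GraphDriver.cpp changes above, these overrides form the following notification path when the operating system reports an audio device change (a sketch of the call chain; only the identifiers that appear in this patch are certain):

  // cubeb signals a device change on the stream
  //  -> AudioCallbackDriver::DeviceChangedCallback_s()      (GraphDriver.cpp)
  //  -> AudioCallbackDriver::DeviceChangedCallback()         takes the graph monitor
  //  -> mAudioInput->DeviceChanged()                         (AudioDataListenerInterface)
  //  -> the listener wrapper above forwards to mAudioSource  (MediaEngineWebRTC.h)
  //  -> MediaEngineWebRTCMicrophoneSource::DeviceChanged()   (MediaEngineWebRTCAudio.cpp, below)
  //     which toggles AGC, AEC and noise suppression so they re-adapt.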
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -494,16 +494,56 @@ MediaEngineWebRTCMicrophoneSource::Notif
     }
     int16_t *packet = mInputBuffer.get();
     mPacketizer->Output(packet);
 
     mVoERender->ExternalRecordingInsertData(packet, samplesPerPacket, aRate, 0);
   }
 }
 
+// If the given webrtc audio processing sub-module (Agc, Ec or Ns) is currently
+// enabled, turn it off and then back on so it drops its internal state and
+// re-adapts to the new device. Relies on a bool named enabled in the caller.
+#define ResetProcessingIfNeeded(_processing)                        \
+do {                                                                \
+  webrtc::_processing##Modes mode;                                  \
+  int rv = mVoEProcessing->Get##_processing##Status(enabled, mode); \
+  if (rv) {                                                         \
+    NS_WARNING("Could not get the status of the "                   \
+     #_processing " on device change.");                            \
+    return;                                                         \
+  }                                                                 \
+                                                                    \
+  if (enabled) {                                                    \
+    rv = mVoEProcessing->Set##_processing##Status(!enabled);        \
+    if (rv) {                                                       \
+      NS_WARNING("Could not reset the status of the "               \
+      #_processing " on device change.");                           \
+      return;                                                       \
+    }                                                               \
+                                                                    \
+    rv = mVoEProcessing->Set##_processing##Status(enabled);         \
+    if (rv) {                                                       \
+      NS_WARNING("Could not reset the status of the "               \
+      #_processing " on device change.");                           \
+      return;                                                       \
+    }                                                               \
+  }                                                                 \
+} while (0)
+
+void
+MediaEngineWebRTCMicrophoneSource::DeviceChanged()
+{
+  // Toggle AGC, echo cancellation and noise suppression off and back on so
+  // they reset their internal state and re-adapt to the new device.
+  bool enabled;
+  ResetProcessingIfNeeded(Agc);
+  ResetProcessingIfNeeded(Ec);
+  ResetProcessingIfNeeded(Ns);
+}
+
 void
 MediaEngineWebRTCMicrophoneSource::Init()
 {
   mVoEBase = webrtc::VoEBase::GetInterface(mVoiceEngine);
 
   mVoEBase->Init();
 
   mVoERender = webrtc::VoEExternalMedia::GetInterface(mVoiceEngine);
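
For reference, this is roughly what the preprocessor produces for the ResetProcessingIfNeeded(Agc) call in DeviceChanged() above. It is not standalone code: enabled, mVoEProcessing and the webrtc Get/SetAgcStatus() calls all come from the surrounding source, and the second argument of SetAgcStatus() is left at its default, exactly as in the macro.

  do {
    webrtc::AgcModes mode;
    int rv = mVoEProcessing->GetAgcStatus(enabled, mode);
    if (rv) {
      NS_WARNING("Could not get the status of the Agc on device change.");
      return;
    }
    if (enabled) {
      // Toggle the automatic gain control off...
      rv = mVoEProcessing->SetAgcStatus(!enabled);
      if (rv) {
        NS_WARNING("Could not reset the status of the Agc on device change.");
        return;
      }
      // ...and back on, so it discards its adapted state for the new device.
      rv = mVoEProcessing->SetAgcStatus(enabled);
      if (rv) {
        NS_WARNING("Could not reset the status of the Agc on device change.");
        return;
      }
    }
  } while (0);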