Backed out changeset 1f410dde84d9 (bug 984498)
author Carsten "Tomcat" Book <cbook@mozilla.com>
Wed, 23 Apr 2014 08:20:37 +0200
changeset 199277 215d847f679c98f29cfc622486ccd809313be8a7
parent 199276 52a236cf50bddbe95f16b6bfac89ae2dc0bf1b4c
child 199278 1c5860c1ca5148f89499b5bd2125ea205c6f124e
push id 486
push user asasaki@mozilla.com
push date Mon, 14 Jul 2014 18:39:42 +0000
treeherder mozilla-release@d33428174ff1
bugs 984498
milestone 31.0a1
backs out 1f410dde84d90500808d647da4463e55e6c677b7
Backed out changeset 1f410dde84d9 (bug 984498)
content/media/MediaStreamGraph.cpp
content/media/MediaStreamGraph.h
content/media/webaudio/AudioContext.cpp
content/media/webaudio/AudioContext.h
content/media/webaudio/AudioDestinationNode.cpp
content/media/webaudio/AudioDestinationNode.h
content/media/webaudio/test/test_mozaudiochannel.html
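
For readers skimming the diff below, a minimal JavaScript sketch of the web-facing effect of this backout (an illustration inferred from the test change, not part of the patch; the "content" channel value is an example):

    // Before the backout, bug 984498 let callers pass an audio channel to the
    // constructor, e.g.:
    //   var ac = new AudioContext("content");
    //
    // After the backout, only the argument-free constructor remains; the channel
    // falls back to the "media.defaultAudioChannel" preference, which
    // AudioDestinationNode reads via AudioChannelService::GetDefaultAudioChannel().
    var ac = new AudioContext();
    console.log(ac.mozAudioChannelType); // reflects the default channel on builds exposing it
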
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -843,17 +843,17 @@ MediaStreamGraphImpl::CreateOrDestroyAud
         audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
         audioOutputStream->mBlockedAudioTime = 0;
         audioOutputStream->mLastTickWritten = 0;
         audioOutputStream->mStream = new AudioStream();
         // XXX for now, allocate stereo output. But we need to fix this to
         // match the system's ideal channel configuration.
         // NOTE: we presume this is either fast or async-under-the-covers
         audioOutputStream->mStream->Init(2, IdealAudioRate(),
-                                         aStream->mAudioChannelType,
+                                         AudioChannel::Normal,
                                          AudioStream::LowLatency);
         audioOutputStream->mTrackID = tracks->GetID();
 
         LogLatency(AsyncLatencyLogger::AudioStreamCreate,
                    reinterpret_cast<uint64_t>(aStream),
                    reinterpret_cast<int64_t>(audioOutputStream->mStream.get()));
       }
     }
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -14,17 +14,16 @@
 #include "StreamBuffer.h"
 #include "TimeVarying.h"
 #include "VideoFrameContainer.h"
 #include "VideoSegment.h"
 #include "MainThreadUtils.h"
 #include "nsAutoRef.h"
 #include "speex/speex_resampler.h"
 #include "AudioMixer.h"
-#include "mozilla/dom/AudioChannelBinding.h"
 
 class nsIRunnable;
 
 template <>
 class nsAutoRefTraits<SpeexResamplerState> : public nsPointerRefTraits<SpeexResamplerState>
 {
   public:
   static void Release(SpeexResamplerState* aState) { speex_resampler_destroy(aState); }
@@ -520,20 +519,16 @@ public:
   virtual bool MainThreadNeedsUpdates() const
   {
     return true;
   }
 
   virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;
   virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;
 
-  void SetAudioChannelType(dom::AudioChannel aType) {
-    mAudioChannelType = aType;
-  }
-
 protected:
   virtual void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
   {
     mBufferStartTime += aBlockedTime;
     mGraphUpdateIndices.InsertTimeAtStart(aBlockedTime);
     mGraphUpdateIndices.AdvanceCurrentTime(aCurrentTime);
     mExplicitBlockerCount.AdvanceCurrentTime(aCurrentTime);
 
@@ -650,18 +645,16 @@ protected:
   DOMMediaStream* mWrapper;
   // Main-thread views of state
   StreamTime mMainThreadCurrentTime;
   bool mMainThreadFinished;
   bool mMainThreadDestroyed;
 
   // Our media stream graph
   MediaStreamGraphImpl* mGraph;
-
-  dom::AudioChannel mAudioChannelType;
 };
 
 /**
  * This is a stream into which a decoder can write audio and video.
  *
  * Audio and video can be written on any thread, but you probably want to
  * always write from the same thread to avoid unexpected interleavings.
  */
--- a/content/media/webaudio/AudioContext.cpp
+++ b/content/media/webaudio/AudioContext.cpp
@@ -72,34 +72,33 @@ static float GetSampleRateForAudioContex
   } else {
     AudioStream::InitPreferredSampleRate();
     return static_cast<float>(AudioStream::PreferredSampleRate());
   }
 }
 
 AudioContext::AudioContext(nsPIDOMWindow* aWindow,
                            bool aIsOffline,
-                           AudioChannel aChannel,
                            uint32_t aNumberOfChannels,
                            uint32_t aLength,
                            float aSampleRate)
   : DOMEventTargetHelper(aWindow)
   , mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
   , mNumberOfChannels(aNumberOfChannels)
   , mNodeCount(0)
   , mIsOffline(aIsOffline)
   , mIsStarted(!aIsOffline)
   , mIsShutDown(false)
 {
   aWindow->AddAudioContext(this);
 
   // Note: AudioDestinationNode needs an AudioContext that must already be
   // bound to the window.
-  mDestination = new AudioDestinationNode(this, aIsOffline, aChannel,
-                                          aNumberOfChannels, aLength, aSampleRate);
+  mDestination = new AudioDestinationNode(this, aIsOffline, aNumberOfChannels,
+                                          aLength, aSampleRate);
   // We skip calling SetIsOnlyNodeForContext during mDestination's constructor,
   // because we can only call SetIsOnlyNodeForContext after mDestination has
   // been set up.
   mDestination->SetIsOnlyNodeForContext(true);
 }
 
 AudioContext::~AudioContext()
 {
@@ -135,34 +134,16 @@ AudioContext::Constructor(const GlobalOb
 
   RegisterWeakMemoryReporter(object);
 
   return object.forget();
 }
 
 /* static */ already_AddRefed<AudioContext>
 AudioContext::Constructor(const GlobalObject& aGlobal,
-                          AudioChannel aChannel,
-                          ErrorResult& aRv)
-{
-  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
-  if (!window) {
-    aRv.Throw(NS_ERROR_FAILURE);
-    return nullptr;
-  }
-
-  nsRefPtr<AudioContext> object = new AudioContext(window, false, aChannel);
-
-  RegisterWeakMemoryReporter(object);
-
-  return object.forget();
-}
-
-/* static */ already_AddRefed<AudioContext>
-AudioContext::Constructor(const GlobalObject& aGlobal,
                           uint32_t aNumberOfChannels,
                           uint32_t aLength,
                           float aSampleRate,
                           ErrorResult& aRv)
 {
   nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
   if (!window) {
     aRv.Throw(NS_ERROR_FAILURE);
@@ -176,17 +157,16 @@ AudioContext::Constructor(const GlobalOb
       aSampleRate >= TRACK_RATE_MAX) {
     // The DOM binding protects us against infinity and NaN
     aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
     return nullptr;
   }
 
   nsRefPtr<AudioContext> object = new AudioContext(window,
                                                    true,
-                                                   AudioChannel::Normal,
                                                    aNumberOfChannels,
                                                    aLength,
                                                    aSampleRate);
 
   RegisterWeakMemoryReporter(object);
 
   return object.forget();
 }
--- a/content/media/webaudio/AudioContext.h
+++ b/content/media/webaudio/AudioContext.h
@@ -62,17 +62,16 @@ class ScriptProcessorNode;
 class WaveShaperNode;
 class PeriodicWave;
 
 class AudioContext MOZ_FINAL : public DOMEventTargetHelper,
                                public nsIMemoryReporter
 {
   AudioContext(nsPIDOMWindow* aParentWindow,
                bool aIsOffline,
-               AudioChannel aChannel = AudioChannel::Normal,
                uint32_t aNumberOfChannels = 0,
                uint32_t aLength = 0,
                float aSampleRate = 0.0f);
   ~AudioContext();
 
 public:
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioContext,
@@ -91,22 +90,16 @@ public:
   virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
 
   using DOMEventTargetHelper::DispatchTrustedEvent;
 
   // Constructor for regular AudioContext
   static already_AddRefed<AudioContext>
   Constructor(const GlobalObject& aGlobal, ErrorResult& aRv);
 
-  // Constructor for regular AudioContext. A default audio channel is needed.
-  static already_AddRefed<AudioContext>
-  Constructor(const GlobalObject& aGlobal,
-              AudioChannel aChannel,
-              ErrorResult& aRv);
-
   // Constructor for offline AudioContext
   static already_AddRefed<AudioContext>
   Constructor(const GlobalObject& aGlobal,
               uint32_t aNumberOfChannels,
               uint32_t aLength,
               float aSampleRate,
               ErrorResult& aRv);
 
--- a/content/media/webaudio/AudioDestinationNode.cpp
+++ b/content/media/webaudio/AudioDestinationNode.cpp
@@ -223,17 +223,16 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_
   NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference)
 NS_INTERFACE_MAP_END_INHERITING(AudioNode)
 
 NS_IMPL_ADDREF_INHERITED(AudioDestinationNode, AudioNode)
 NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)
 
 AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                            bool aIsOffline,
-                                           AudioChannel aChannel,
                                            uint32_t aNumberOfChannels,
                                            uint32_t aLength,
                                            float aSampleRate)
   : AudioNode(aContext,
               aIsOffline ? aNumberOfChannels : 2,
               ChannelCountMode::Explicit,
               ChannelInterpretation::Speakers)
   , mFramesToProduce(aLength)
@@ -248,23 +247,23 @@ AudioDestinationNode::AudioDestinationNo
                             MediaStreamGraph::CreateNonRealtimeInstance() :
                             MediaStreamGraph::GetInstance();
   AudioNodeEngine* engine = aIsOffline ?
                             new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                              aLength, aSampleRate) :
                             static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));
 
   mStream = graph->CreateAudioNodeStream(engine, MediaStreamGraph::EXTERNAL_STREAM);
-  mStream->SetAudioChannelType(aChannel);
   mStream->AddMainThreadListener(this);
   mStream->AddAudioOutput(&gWebAudioOutputKey);
 
-  if (aChannel != AudioChannel::Normal) {
+  AudioChannel channel = AudioChannelService::GetDefaultAudioChannel();
+  if (channel != AudioChannel::Normal) {
     ErrorResult rv;
-    SetMozAudioChannelType(aChannel, rv);
+    SetMozAudioChannelType(channel, rv);
   }
 
   if (!aIsOffline && UseAudioChannelService()) {
     nsCOMPtr<nsIDOMEventTarget> target = do_QueryInterface(GetOwner());
     if (target) {
       target->AddSystemEventListener(NS_LITERAL_STRING("visibilitychange"), this,
                                      /* useCapture = */ true,
                                      /* wantsUntrusted = */ false);
--- a/content/media/webaudio/AudioDestinationNode.h
+++ b/content/media/webaudio/AudioDestinationNode.h
@@ -25,17 +25,16 @@ class AudioDestinationNode : public Audi
                            , public nsSupportsWeakReference
                            , public MainThreadMediaStreamListener
 {
 public:
   // This node type knows what MediaStreamGraph to use based on
   // whether it's in offline mode.
   AudioDestinationNode(AudioContext* aContext,
                        bool aIsOffline,
-                       AudioChannel aChannel = AudioChannel::Normal,
                        uint32_t aNumberOfChannels = 0,
                        uint32_t aLength = 0,
                        float aSampleRate = 0.0f);
 
   virtual void DestroyMediaStream() MOZ_OVERRIDE;
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioDestinationNode, AudioNode)
--- a/content/media/webaudio/test/test_mozaudiochannel.html
+++ b/content/media/webaudio/test/test_mozaudiochannel.html
@@ -61,17 +61,17 @@ function test_permission(aChannel) {
 }
 
 function test_preferences(aChannel) {
   SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", aChannel ]]},
     function() {
       SpecialPowers.pushPermissions(
         [{ "type": "audio-channel-" + aChannel, "allow": false, "context": document }],
         function() {
-          var ac = new AudioContext(aChannel);
+          var ac = new AudioContext();
           ok(ac, "AudioContext created");
           is(ac.mozAudioChannelType, aChannel, "Default ac channel == '" + aChannel + "'");
           runTest();
         }
       );
     }
   );
 }