Backed out changeset c07bced21c89 (bug 984498) for mochitest-1 bustage on a CLOSED TREE
author      Carsten "Tomcat" Book <cbook@mozilla.com>
date        Fri, 25 Apr 2014 12:09:54 +0200
changeset   198705  c5e1fc3304208581e123ac363905ec4f4bf4c24f
parent      198704  3c2c079144c4298485b1995776a4d161d397e59b
child       198706  6c67c3dec3d0a5a674715c733ce1f81c7d23bb64
push id     3624
push user   asasaki@mozilla.com
push date   Mon, 09 Jun 2014 21:49:01 +0000
treeherder  mozilla-beta@b1a5da15899a
bugs        984498
milestone   31.0a1
backs out   c07bced21c89b7f3745c2b1cd6300ca6c175a129
Backed out changeset c07bced21c89 (bug 984498) for mochitest-1 bustage on a CLOSED TREE
content/media/MediaStreamGraph.cpp
content/media/MediaStreamGraph.h
content/media/webaudio/AudioContext.cpp
content/media/webaudio/AudioContext.h
content/media/webaudio/AudioDestinationNode.cpp
content/media/webaudio/AudioDestinationNode.h
content/media/webaudio/test/test_mozaudiochannel.html
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -844,17 +844,17 @@ MediaStreamGraphImpl::CreateOrDestroyAud
         audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
         audioOutputStream->mBlockedAudioTime = 0;
         audioOutputStream->mLastTickWritten = 0;
         audioOutputStream->mStream = new AudioStream();
         // XXX for now, allocate stereo output. But we need to fix this to
         // match the system's ideal channel configuration.
         // NOTE: we presume this is either fast or async-under-the-covers
         audioOutputStream->mStream->Init(2, mSampleRate,
-                                         aStream->mAudioChannelType,
+                                         AudioChannel::Normal,
                                          AudioStream::LowLatency);
         audioOutputStream->mTrackID = tracks->GetID();
 
         LogLatency(AsyncLatencyLogger::AudioStreamCreate,
                    reinterpret_cast<uint64_t>(aStream),
                    reinterpret_cast<int64_t>(audioOutputStream->mStream.get()));
       }
     }
@@ -1779,17 +1779,16 @@ MediaStream::MediaStream(DOMMediaStream*
   , mNotifiedBlocked(false)
   , mHasCurrentData(false)
   , mNotifiedHasCurrentData(false)
   , mWrapper(aWrapper)
   , mMainThreadCurrentTime(0)
   , mMainThreadFinished(false)
   , mMainThreadDestroyed(false)
   , mGraph(nullptr)
-  , mAudioChannelType(dom::AudioChannel::Normal)
 {
   MOZ_COUNT_CTOR(MediaStream);
   // aWrapper should not already be connected to a MediaStream! It needs
   // to be hooked up to this stream, and since this stream is only just
   // being created now, aWrapper must not be connected to anything.
   NS_ASSERTION(!aWrapper || !aWrapper->GetStream(),
                "Wrapper already has another media stream hooked up to it!");
 }
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -14,17 +14,16 @@
 #include "StreamBuffer.h"
 #include "TimeVarying.h"
 #include "VideoFrameContainer.h"
 #include "VideoSegment.h"
 #include "MainThreadUtils.h"
 #include "nsAutoRef.h"
 #include "speex/speex_resampler.h"
 #include "AudioMixer.h"
-#include "mozilla/dom/AudioChannelBinding.h"
 
 class nsIRunnable;
 
 template <>
 class nsAutoRefTraits<SpeexResamplerState> : public nsPointerRefTraits<SpeexResamplerState>
 {
   public:
   static void Release(SpeexResamplerState* aState) { speex_resampler_destroy(aState); }
@@ -520,18 +519,16 @@ public:
   virtual bool MainThreadNeedsUpdates() const
   {
     return true;
   }
 
   virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;
   virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;
 
-  void SetAudioChannelType(dom::AudioChannel aType) { mAudioChannelType = aType; }
-
 protected:
   virtual void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
   {
     mBufferStartTime += aBlockedTime;
     mGraphUpdateIndices.InsertTimeAtStart(aBlockedTime);
     mGraphUpdateIndices.AdvanceCurrentTime(aCurrentTime);
     mExplicitBlockerCount.AdvanceCurrentTime(aCurrentTime);
 
@@ -648,18 +645,16 @@ protected:
   DOMMediaStream* mWrapper;
   // Main-thread views of state
   StreamTime mMainThreadCurrentTime;
   bool mMainThreadFinished;
   bool mMainThreadDestroyed;
 
   // Our media stream graph
   MediaStreamGraphImpl* mGraph;
-
-  dom::AudioChannel mAudioChannelType;
 };
 
 /**
  * This is a stream into which a decoder can write audio and video.
  *
  * Audio and video can be written on any thread, but you probably want to
  * always write from the same thread to avoid unexpected interleavings.
  */
--- a/content/media/webaudio/AudioContext.cpp
+++ b/content/media/webaudio/AudioContext.cpp
@@ -72,34 +72,33 @@ static float GetSampleRateForAudioContex
   } else {
     AudioStream::InitPreferredSampleRate();
     return static_cast<float>(AudioStream::PreferredSampleRate());
   }
 }
 
 AudioContext::AudioContext(nsPIDOMWindow* aWindow,
                            bool aIsOffline,
-                           AudioChannel aChannel,
                            uint32_t aNumberOfChannels,
                            uint32_t aLength,
                            float aSampleRate)
   : DOMEventTargetHelper(aWindow)
   , mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
   , mNumberOfChannels(aNumberOfChannels)
   , mNodeCount(0)
   , mIsOffline(aIsOffline)
   , mIsStarted(!aIsOffline)
   , mIsShutDown(false)
 {
   aWindow->AddAudioContext(this);
 
   // Note: AudioDestinationNode needs an AudioContext that must already be
   // bound to the window.
-  mDestination = new AudioDestinationNode(this, aIsOffline, aChannel,
-                                          aNumberOfChannels, aLength, aSampleRate);
+  mDestination = new AudioDestinationNode(this, aIsOffline, aNumberOfChannels,
+                                          aLength, aSampleRate);
   // We skip calling SetIsOnlyNodeForContext during mDestination's constructor,
   // because we can only call SetIsOnlyNodeForContext after mDestination has
   // been set up.
   mDestination->SetIsOnlyNodeForContext(true);
 }
 
 AudioContext::~AudioContext()
 {
@@ -135,34 +134,16 @@ AudioContext::Constructor(const GlobalOb
 
   RegisterWeakMemoryReporter(object);
 
   return object.forget();
 }
 
 /* static */ already_AddRefed<AudioContext>
 AudioContext::Constructor(const GlobalObject& aGlobal,
-                          AudioChannel aChannel,
-                          ErrorResult& aRv)
-{
-  nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
-  if (!window) {
-    aRv.Throw(NS_ERROR_FAILURE);
-    return nullptr;
-  }
-
-  nsRefPtr<AudioContext> object = new AudioContext(window, false, aChannel);
-
-  RegisterWeakMemoryReporter(object);
-
-  return object.forget();
-}
-
-/* static */ already_AddRefed<AudioContext>
-AudioContext::Constructor(const GlobalObject& aGlobal,
                           uint32_t aNumberOfChannels,
                           uint32_t aLength,
                           float aSampleRate,
                           ErrorResult& aRv)
 {
   nsCOMPtr<nsPIDOMWindow> window = do_QueryInterface(aGlobal.GetAsSupports());
   if (!window) {
     aRv.Throw(NS_ERROR_FAILURE);
@@ -176,17 +157,16 @@ AudioContext::Constructor(const GlobalOb
       aSampleRate >= TRACK_RATE_MAX) {
     // The DOM binding protects us against infinity and NaN
     aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
     return nullptr;
   }
 
   nsRefPtr<AudioContext> object = new AudioContext(window,
                                                    true,
-                                                   AudioChannel::Normal,
                                                    aNumberOfChannels,
                                                    aLength,
                                                    aSampleRate);
 
   RegisterWeakMemoryReporter(object);
 
   return object.forget();
 }
--- a/content/media/webaudio/AudioContext.h
+++ b/content/media/webaudio/AudioContext.h
@@ -62,17 +62,16 @@ class ScriptProcessorNode;
 class WaveShaperNode;
 class PeriodicWave;
 
 class AudioContext MOZ_FINAL : public DOMEventTargetHelper,
                                public nsIMemoryReporter
 {
   AudioContext(nsPIDOMWindow* aParentWindow,
                bool aIsOffline,
-               AudioChannel aChannel = AudioChannel::Normal,
                uint32_t aNumberOfChannels = 0,
                uint32_t aLength = 0,
                float aSampleRate = 0.0f);
   ~AudioContext();
 
 public:
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioContext,
@@ -91,22 +90,16 @@ public:
   virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
 
   using DOMEventTargetHelper::DispatchTrustedEvent;
 
   // Constructor for regular AudioContext
   static already_AddRefed<AudioContext>
   Constructor(const GlobalObject& aGlobal, ErrorResult& aRv);
 
-  // Constructor for regular AudioContext. A default audio channel is needed.
-  static already_AddRefed<AudioContext>
-  Constructor(const GlobalObject& aGlobal,
-              AudioChannel aChannel,
-              ErrorResult& aRv);
-
   // Constructor for offline AudioContext
   static already_AddRefed<AudioContext>
   Constructor(const GlobalObject& aGlobal,
               uint32_t aNumberOfChannels,
               uint32_t aLength,
               float aSampleRate,
               ErrorResult& aRv);
 
--- a/content/media/webaudio/AudioDestinationNode.cpp
+++ b/content/media/webaudio/AudioDestinationNode.cpp
@@ -223,17 +223,16 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_
   NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference)
 NS_INTERFACE_MAP_END_INHERITING(AudioNode)
 
 NS_IMPL_ADDREF_INHERITED(AudioDestinationNode, AudioNode)
 NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)
 
 AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                            bool aIsOffline,
-                                           AudioChannel aChannel,
                                            uint32_t aNumberOfChannels,
                                            uint32_t aLength,
                                            float aSampleRate)
   : AudioNode(aContext,
               aIsOffline ? aNumberOfChannels : 2,
               ChannelCountMode::Explicit,
               ChannelInterpretation::Speakers)
   , mFramesToProduce(aLength)
@@ -248,23 +247,23 @@ AudioDestinationNode::AudioDestinationNo
                             MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate) :
                             MediaStreamGraph::GetInstance();
   AudioNodeEngine* engine = aIsOffline ?
                             new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                              aLength, aSampleRate) :
                             static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));
 
   mStream = graph->CreateAudioNodeStream(engine, MediaStreamGraph::EXTERNAL_STREAM);
-  mStream->SetAudioChannelType(aChannel);
   mStream->AddMainThreadListener(this);
   mStream->AddAudioOutput(&gWebAudioOutputKey);
 
-  if (aChannel != AudioChannel::Normal) {
+  AudioChannel channel = AudioChannelService::GetDefaultAudioChannel();
+  if (channel != AudioChannel::Normal) {
     ErrorResult rv;
-    SetMozAudioChannelType(aChannel, rv);
+    SetMozAudioChannelType(channel, rv);
   }
 
   if (!aIsOffline && UseAudioChannelService()) {
     nsCOMPtr<nsIDOMEventTarget> target = do_QueryInterface(GetOwner());
     if (target) {
       target->AddSystemEventListener(NS_LITERAL_STRING("visibilitychange"), this,
                                      /* useCapture = */ true,
                                      /* wantsUntrusted = */ false);
--- a/content/media/webaudio/AudioDestinationNode.h
+++ b/content/media/webaudio/AudioDestinationNode.h
@@ -25,17 +25,16 @@ class AudioDestinationNode : public Audi
                            , public nsSupportsWeakReference
                            , public MainThreadMediaStreamListener
 {
 public:
   // This node type knows what MediaStreamGraph to use based on
   // whether it's in offline mode.
   AudioDestinationNode(AudioContext* aContext,
                        bool aIsOffline,
-                       AudioChannel aChannel = AudioChannel::Normal,
                        uint32_t aNumberOfChannels = 0,
                        uint32_t aLength = 0,
                        float aSampleRate = 0.0f);
 
   virtual void DestroyMediaStream() MOZ_OVERRIDE;
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioDestinationNode, AudioNode)
--- a/content/media/webaudio/test/test_mozaudiochannel.html
+++ b/content/media/webaudio/test/test_mozaudiochannel.html
@@ -61,17 +61,17 @@ function test_permission(aChannel) {
 }
 
 function test_preferences(aChannel) {
   SpecialPowers.pushPrefEnv({"set": [["media.defaultAudioChannel", aChannel ]]},
     function() {
       SpecialPowers.pushPermissions(
         [{ "type": "audio-channel-" + aChannel, "allow": false, "context": document }],
         function() {
-          var ac = new AudioContext(aChannel);
+          var ac = new AudioContext();
           ok(ac, "AudioContext created");
           is(ac.mozAudioChannelType, aChannel, "Default ac channel == '" + aChannel + "'");
           runTest();
         }
       );
     }
   );
 }