Bug 1487057 - Part 4 - Remove AsyncLatencyLogger and associated code. r=pehrsons
author Paul Adenot <paul@paul.cx>
Thu, 30 Aug 2018 17:11:57 +0200
changeset 489223 304b4f68b942eb98cddfda6c58376332772b3e72
parent 489222 2ff222ff2a723fc90210e9bd84968333c0fa5f86
child 489224 746f70de2691bb500afd00daa6cc4d6bf20b07dd
push id 247
push user fmarier@mozilla.com
push date Sat, 27 Oct 2018 01:06:44 +0000
reviewers pehrsons
bugs 1487057
milestone 64.0a1
Bug 1487057 - Part 4 - Remove AsyncLatencyLogger and associated code. r=pehrsons

It's not maintained and probably does not work anymore.

Differential Revision: https://phabricator.services.mozilla.com/D5438
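For context, the logging pattern this patch deletes (see the AudioSegment::WriteTo() hunk below) looked roughly like the following sketch. It is only an illustration written against the pre-patch tree, where dom/media/Latency.h still exists; the helper name ReportChunkLatency is hypothetical and not part of the original code.

#include "Latency.h"            // AsyncLatencyLogger, LogTime() (removed by this patch)
#include "mozilla/TimeStamp.h"

static void ReportChunkLatency(uint64_t aStreamId, mozilla::TimeStamp aChunkTime)
{
  if (aChunkTime.IsNull()) {
    return; // the original call site skipped chunks without a capture timestamp
  }
  mozilla::TimeStamp now = mozilla::TimeStamp::Now();
  // Log index, sub-identifier (stream/track id), latency delta in ms, and the
  // original capture timestamp, mirroring the call removed from WriteTo().
  LogTime(AsyncLatencyLogger::AudioMediaStreamTrack, aStreamId,
          (now - aChunkTime).ToMilliseconds(), aChunkTime);
}

With this patch the per-chunk timestamps, the logger singleton, and its helper thread are all gone, so callers no longer need (or can use) this pattern.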
dom/media/AudioSegment.cpp
dom/media/AudioSegment.h
dom/media/Latency.cpp
dom/media/Latency.h
dom/media/MediaManager.cpp
dom/media/MediaSegment.h
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraphImpl.h
dom/media/moz.build
dom/media/test/graph_latency.py
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
dom/media/webrtc/MediaEngineWebRTCAudio.h
layout/build/nsLayoutStatics.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.h
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/signaling/test/FakeMediaStreamsImpl.h
--- a/dom/media/AudioSegment.cpp
+++ b/dom/media/AudioSegment.cpp
@@ -2,17 +2,16 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "AudioSegment.h"
 
 #include "AudioMixer.h"
 #include "AudioChannelFormat.h"
-#include "Latency.h"
 #include <speex/speex_resampler.h>
 
 namespace mozilla {
 
 const uint8_t SilentChannel::gZeroChannel[MAX_AUDIO_SAMPLE_SIZE*SilentChannel::AUDIO_PROCESSING_FRAMES] = {0};
 
 template<>
 const float* SilentChannel::ZeroChannel<float>()
@@ -159,17 +158,17 @@ AudioSegment::Mix(AudioMixer& aMixer, ui
   if (offsetSamples) {
     MOZ_ASSERT(offsetSamples == outBufferLength / aOutputChannels,
                "We forgot to write some samples?");
     aMixer.Mix(buf.Elements(), aOutputChannels, offsetSamples, aSampleRate);
   }
 }
 
 void
-AudioSegment::WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
+AudioSegment::WriteTo(AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
 {
   AutoTArray<AudioDataValue,SilentChannel::AUDIO_PROCESSING_FRAMES*GUESS_AUDIO_CHANNELS> buf;
   // Offset in the buffer that will be written to the mixer, in samples.
   uint32_t offset = 0;
 
   if (GetDuration() <= 0) {
     MOZ_ASSERT(GetDuration() == 0);
     return;
@@ -193,23 +192,16 @@ AudioSegment::WriteTo(uint64_t aID, Audi
         // The mixer is expecting interleaved data, so this is ok.
         PodZero(buf.Elements() + offset, c.mDuration * aOutputChannels);
         break;
       default:
         MOZ_ASSERT(false, "Not handled");
     }
 
     offset += c.mDuration * aOutputChannels;
-
-    if (!c.mTimeStamp.IsNull()) {
-      TimeStamp now = TimeStamp::Now();
-      // would be more efficient to c.mTimeStamp to ms on create time then pass here
-      LogTime(AsyncLatencyLogger::AudioMediaStreamTrack, aID,
-              (now - c.mTimeStamp).ToMilliseconds(), c.mTimeStamp);
-    }
   }
 
   if (offset) {
     aMixer.Mix(buf.Elements(), aOutputChannels, offset / aOutputChannels, aSampleRate);
   }
 }
 
 } // namespace mozilla
--- a/dom/media/AudioSegment.h
+++ b/dom/media/AudioSegment.h
@@ -285,19 +285,16 @@ struct AudioChunk {
 
   StreamTime mDuration = 0; // in frames within the buffer
   RefPtr<ThreadSharedObject> mBuffer; // the buffer object whose lifetime is managed; null means data is all zeroes
   // one pointer per channel; empty if and only if mBuffer is null
   AutoTArray<const void*,GUESS_AUDIO_CHANNELS> mChannelData;
   float mVolume = 1.0f; // volume multiplier to apply
   // format of frames in mBuffer (or silence if mBuffer is null)
   SampleFormat mBufferFormat = AUDIO_FORMAT_SILENCE;
-#ifdef MOZILLA_INTERNAL_API
-  mozilla::TimeStamp mTimeStamp;           // time at which this has been fetched from the MediaEngine
-#endif
   // principalHandle for the data in this chunk.
   // This can be compared to an nsIPrincipal* when back on main thread.
   PrincipalHandle mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
 };
 
 /**
  * A list of audio samples consisting of a sequence of slices of SharedBuffers.
  * The audio rate is determined by the track, not stored in this class.
@@ -379,63 +376,54 @@ public:
     chunk->mBuffer = aBuffer;
 
     MOZ_ASSERT(chunk->mBuffer || aChannelData.IsEmpty(), "Appending invalid data ?");
 
     for (uint32_t channel = 0; channel < aChannelData.Length(); ++channel) {
       chunk->mChannelData.AppendElement(aChannelData[channel]);
     }
     chunk->mBufferFormat = AUDIO_FORMAT_FLOAT32;
-#ifdef MOZILLA_INTERNAL_API
-    chunk->mTimeStamp = TimeStamp::Now();
-#endif
     chunk->mPrincipalHandle = aPrincipalHandle;
   }
   void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,
                     const nsTArray<const int16_t*>& aChannelData,
                     int32_t aDuration, const PrincipalHandle& aPrincipalHandle)
   {
     AudioChunk* chunk = AppendChunk(aDuration);
     chunk->mBuffer = aBuffer;
 
     MOZ_ASSERT(chunk->mBuffer || aChannelData.IsEmpty(), "Appending invalid data ?");
 
     for (uint32_t channel = 0; channel < aChannelData.Length(); ++channel) {
       chunk->mChannelData.AppendElement(aChannelData[channel]);
     }
     chunk->mBufferFormat = AUDIO_FORMAT_S16;
-#ifdef MOZILLA_INTERNAL_API
-    chunk->mTimeStamp = TimeStamp::Now();
-#endif
     chunk->mPrincipalHandle = aPrincipalHandle;
 
   }
   // Consumes aChunk, and returns a pointer to the persistent copy of aChunk
   // in the segment.
   AudioChunk* AppendAndConsumeChunk(AudioChunk* aChunk)
   {
     AudioChunk* chunk = AppendChunk(aChunk->mDuration);
     chunk->mBuffer = aChunk->mBuffer.forget();
     chunk->mChannelData.SwapElements(aChunk->mChannelData);
 
     MOZ_ASSERT(chunk->mBuffer || aChunk->mChannelData.IsEmpty(), "Appending invalid data ?");
 
     chunk->mVolume = aChunk->mVolume;
     chunk->mBufferFormat = aChunk->mBufferFormat;
-#ifdef MOZILLA_INTERNAL_API
-    chunk->mTimeStamp = TimeStamp::Now();
-#endif
     chunk->mPrincipalHandle = aChunk->mPrincipalHandle;
     return chunk;
   }
   void ApplyVolume(float aVolume);
   // Mix the segment into a mixer, interleaved. This is useful to output a
   // segment to a system audio callback. It up or down mixes to aChannelCount
   // channels.
-  void WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aChannelCount,
+  void WriteTo(AudioMixer& aMixer, uint32_t aChannelCount,
                uint32_t aSampleRate);
   // Mix the segment into a mixer, keeping it planar, up or down mixing to
   // aChannelCount channels.
   void Mix(AudioMixer& aMixer, uint32_t aChannelCount, uint32_t aSampleRate);
 
   int ChannelCount() {
     NS_WARNING_ASSERTION(
       !mChunks.IsEmpty(),
deleted file mode 100644
--- a/dom/media/Latency.cpp
+++ /dev/null
@@ -1,229 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "Latency.h"
-#include "nsThreadUtils.h"
-#include "mozilla/Logging.h"
-#include <cmath>
-#include <algorithm>
-
-#include <mozilla/Services.h>
-#include <mozilla/StaticPtr.h>
-#include "nsContentUtils.h"
-
-using namespace mozilla;
-
-const char* LatencyLogIndex2Strings[] = {
-  "Audio MediaStreamTrack",
-  "Video MediaStreamTrack",
-  "Cubeb",
-  "AudioStream",
-  "NetEQ",
-  "AudioCapture Base",
-  "AudioCapture Samples",
-  "AudioTrackInsertion",
-  "MediaPipeline Audio Insertion",
-  "AudioTransmit",
-  "AudioReceive",
-  "MediaPipelineAudioPlayout",
-  "MediaStream Create",
-  "AudioStream Create",
-  "AudioSendRTP",
-  "AudioRecvRTP"
-};
-
-static StaticRefPtr<AsyncLatencyLogger> gAsyncLogger;
-
-LogModule*
-GetLatencyLog()
-{
-  static LazyLogModule sLog("MediaLatency");
-  return sLog;
-}
-
-class LogEvent : public Runnable
-{
-public:
-  LogEvent(AsyncLatencyLogger::LatencyLogIndex aIndex,
-           uint64_t aID,
-           int64_t aValue,
-           TimeStamp aTimeStamp)
-    : mozilla::Runnable("LogEvent")
-    , mIndex(aIndex)
-    , mID(aID)
-    , mValue(aValue)
-    , mTimeStamp(aTimeStamp)
-  {}
-  LogEvent(AsyncLatencyLogger::LatencyLogIndex aIndex,
-           uint64_t aID,
-           int64_t aValue)
-    : mozilla::Runnable("LogEvent")
-    , mIndex(aIndex)
-    , mID(aID)
-    , mValue(aValue)
-    , mTimeStamp(TimeStamp())
-  {}
-  ~LogEvent() {}
-
-  NS_IMETHOD Run() override {
-    AsyncLatencyLogger::Get(true)->WriteLog(mIndex, mID, mValue, mTimeStamp);
-    return NS_OK;
-  }
-
-protected:
-  AsyncLatencyLogger::LatencyLogIndex mIndex;
-  uint64_t mID;
-  int64_t mValue;
-  TimeStamp mTimeStamp;
-};
-
-void LogLatency(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
-{
-  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue);
-}
-
-void LogTime(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
-{
-  TimeStamp now = TimeStamp::Now();
-  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue, now);
-}
-
-void LogTime(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
-{
-  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue, aTime);
-}
-
-void LogTime(uint32_t aIndex, uint64_t aID, int64_t aValue)
-{
-  LogTime(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue);
-}
-void LogTime(uint32_t aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
-{
-  LogTime(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue, aTime);
-}
-void LogLatency(uint32_t aIndex, uint64_t aID, int64_t aValue)
-{
-  LogLatency(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue);
-}
-
-/* static */
-void AsyncLatencyLogger::InitializeStatics()
-{
-  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
-
-  //Make sure that the underlying logger is allocated.
-  GetLatencyLog();
-  gAsyncLogger = new AsyncLatencyLogger();
-}
-
-/* static */
-void AsyncLatencyLogger::ShutdownLogger()
-{
-  gAsyncLogger = nullptr;
-}
-
-/* static */
-AsyncLatencyLogger* AsyncLatencyLogger::Get(bool aStartTimer)
-{
-  // Users don't generally null-check the result since we should live longer than they
-  MOZ_ASSERT(gAsyncLogger);
-
-  if (aStartTimer) {
-    gAsyncLogger->Init();
-  }
-  return gAsyncLogger;
-}
-
-NS_IMPL_ISUPPORTS(AsyncLatencyLogger, nsIObserver)
-
-AsyncLatencyLogger::AsyncLatencyLogger()
-  : mThread(nullptr),
-    mMutex("AsyncLatencyLogger")
-{
-  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
-  nsContentUtils::RegisterShutdownObserver(this);
-}
-
-AsyncLatencyLogger::~AsyncLatencyLogger()
-{
-  AsyncLatencyLogger::Shutdown();
-}
-
-void AsyncLatencyLogger::Shutdown()
-{
-  nsContentUtils::UnregisterShutdownObserver(this);
-
-  MutexAutoLock lock(mMutex);
-  if (mThread) {
-    mThread->Shutdown();
-  }
-  mStart = TimeStamp(); // make sure we don't try to restart it for any reason
-}
-
-void AsyncLatencyLogger::Init()
-{
-  MutexAutoLock lock(mMutex);
-  if (mStart.IsNull()) {
-    nsresult rv = NS_NewNamedThread("Latency Logger", getter_AddRefs(mThread));
-    NS_ENSURE_SUCCESS_VOID(rv);
-    mStart = TimeStamp::Now();
-  }
-}
-
-void AsyncLatencyLogger::GetStartTime(TimeStamp &aStart)
-{
-  MutexAutoLock lock(mMutex);
-  aStart = mStart;
-}
-
-nsresult
-AsyncLatencyLogger::Observe(nsISupports* aSubject, const char* aTopic,
-                            const char16_t* aData)
-{
-  MOZ_ASSERT(NS_IsMainThread());
-  if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
-    Shutdown();
-  }
-  return NS_OK;
-}
-
-// aID is a sub-identifier (in particular a specific MediaStramTrack)
-void AsyncLatencyLogger::WriteLog(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue,
-                                  TimeStamp aTimeStamp)
-{
-  if (aTimeStamp.IsNull()) {
-    MOZ_LOG(GetLatencyLog(), LogLevel::Debug,
-      ("Latency: %s,%" PRIu64 ",%" PRId64 ",%" PRId64,
-       LatencyLogIndex2Strings[aIndex], aID, GetTimeStamp(), aValue));
-  } else {
-    MOZ_LOG(GetLatencyLog(), LogLevel::Debug,
-      ("Latency: %s,%" PRIu64 ",%" PRId64 ",%" PRId64 ",%" PRId64,
-       LatencyLogIndex2Strings[aIndex], aID, GetTimeStamp(), aValue,
-       static_cast<int64_t>((aTimeStamp - gAsyncLogger->mStart).ToMilliseconds())));
-  }
-}
-
-int64_t AsyncLatencyLogger::GetTimeStamp()
-{
-  TimeDuration t = TimeStamp::Now() - mStart;
-  return t.ToMilliseconds();
-}
-
-void AsyncLatencyLogger::Log(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
-{
-  TimeStamp null;
-  Log(aIndex, aID, aValue, null);
-}
-
-void AsyncLatencyLogger::Log(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
-{
-  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-    nsCOMPtr<nsIRunnable> event = new LogEvent(aIndex, aID, aValue, aTime);
-    if (mThread) {
-      mThread->Dispatch(event, NS_DISPATCH_NORMAL);
-    }
-  }
-}
deleted file mode 100644
--- a/dom/media/Latency.h
+++ /dev/null
@@ -1,99 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef MOZILLA_LATENCY_H
-#define MOZILLA_LATENCY_H
-
-#include "mozilla/TimeStamp.h"
-#include "mozilla/Logging.h"
-#include "nsCOMPtr.h"
-#include "nsIThread.h"
-#include "mozilla/Monitor.h"
-#include "nsISupportsImpl.h"
-#include "nsIObserver.h"
-
-class AsyncLatencyLogger;
-
-mozilla::LogModule* GetLatencyLog();
-
-// This class is a singleton. It is refcounted.
-class AsyncLatencyLogger : public nsIObserver
-{
-  NS_DECL_THREADSAFE_ISUPPORTS
-  NS_DECL_NSIOBSERVER
-
-public:
-
-  enum LatencyLogIndex {
-    AudioMediaStreamTrack = 0,
-    VideoMediaStreamTrack,
-    Cubeb,
-    AudioStream,
-    NetEQ,
-    AudioCaptureBase, // base time for capturing an audio stream
-    AudioCapture, // records number of samples captured and the time
-    AudioTrackInsertion, // # of samples inserted into a mediastreamtrack and the time
-    MediaPipelineAudioInsertion, // Timestamp and time of timestamp
-    AudioTransmit, // Timestamp and socket send time
-    AudioReceive, // Timestamp and receive time
-    MediaPipelineAudioPlayout, // Timestamp and playout into MST time
-    MediaStreamCreate, // Source and TrackUnion streams
-    AudioStreamCreate, // TrackUnion stream and AudioStream
-    AudioSendRTP,
-    AudioRecvRTP,
-    _MAX_INDEX
-  };
-  // Log with a null timestamp
-  void Log(LatencyLogIndex index, uint64_t aID, int64_t aValue);
-  // Log with a timestamp
-  void Log(LatencyLogIndex index, uint64_t aID, int64_t aValue,
-           mozilla::TimeStamp &aTime);
-  // Write a log message to NSPR
-  void WriteLog(LatencyLogIndex index, uint64_t aID, int64_t aValue,
-                mozilla::TimeStamp timestamp);
-  // Get the base time used by the logger for delta calculations
-  void GetStartTime(mozilla::TimeStamp &aStart);
-
-  static AsyncLatencyLogger* Get(bool aStartTimer = false);
-  static void InitializeStatics();
-  // After this is called, the global log object may go away
-  static void ShutdownLogger();
-private:
-  AsyncLatencyLogger();
-  virtual ~AsyncLatencyLogger();
-  int64_t GetTimeStamp();
-  void Init();
-  // Shut down the thread associated with this, and make sure it doesn't
-  // start up again.
-  void Shutdown();
-  // The thread on which the IO happens
-  nsCOMPtr<nsIThread> mThread;
-  // This can be initialized on multiple threads, but is protected by a
-  // monitor. After the initialization phase, it is accessed on the log
-  // thread only.
-  mozilla::TimeStamp mStart;
-  // This monitor protects mStart and mMediaLatencyLog for the
-  // initialization sequence. It is initialized at layout startup, and
-  // destroyed at layout shutdown.
-  mozilla::Mutex mMutex;
-};
-
-// need uint32_t versions for access from webrtc/trunk code
-// Log without a time delta
-void LogLatency(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue);
-void LogLatency(uint32_t index, uint64_t aID, int64_t aValue);
-// Log TimeStamp::Now() (as delta)
-void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue);
-void LogTime(uint32_t index, uint64_t aID, int64_t aValue);
-// Log the specified time (as delta)
-void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue,
-             mozilla::TimeStamp &aTime);
-
-// For generating unique-ish ids for logged sources
-#define LATENCY_STREAM_ID(source, trackID) \
-  ((((uint64_t) (source)) & ~0x0F) | (trackID))
-
-#endif
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -53,17 +53,16 @@
 #include "mozilla/dom/MediaDevices.h"
 #include "mozilla/Base64.h"
 #include "mozilla/ipc/BackgroundChild.h"
 #include "mozilla/media/MediaChild.h"
 #include "mozilla/media/MediaTaskUtils.h"
 #include "MediaTrackConstraints.h"
 #include "VideoUtils.h"
 #include "ThreadSafeRefcountingWithMainThreadDestruction.h"
-#include "Latency.h"
 #include "nsProxyRelease.h"
 #include "nsVariant.h"
 
 // For snprintf
 #include "mozilla/Sprintf.h"
 
 #include "nsJSUtils.h"
 #include "nsGlobalWindow.h"
--- a/dom/media/MediaSegment.h
+++ b/dom/media/MediaSegment.h
@@ -8,17 +8,16 @@
 
 #include "nsTArray.h"
 #include "nsIPrincipal.h"
 #include "nsProxyRelease.h"
 #ifdef MOZILLA_INTERNAL_API
 #include "mozilla/TimeStamp.h"
 #endif
 #include <algorithm>
-#include "Latency.h"
 
 namespace mozilla {
 
 /**
  * Track or graph rate in Hz. Maximum 1 << TRACK_RATE_MAX_BITS Hz. This
  * maximum avoids overflow in conversions between track rates and conversions
  * from seconds.
  */
@@ -326,19 +325,16 @@ public:
     if (aDuration <= 0) {
       return;
     }
     if (!mChunks.IsEmpty() && mChunks[0].IsNull()) {
       mChunks[0].mDuration += aDuration;
     } else {
       mChunks.InsertElementAt(0)->SetNull(aDuration);
     }
-#ifdef MOZILLA_INTERNAL_API
-    mChunks[0].mTimeStamp = mozilla::TimeStamp::Now();
-#endif
     mDuration += aDuration;
   }
   void AppendNullData(StreamTime aDuration) override
   {
     if (aDuration <= 0) {
       return;
     }
     if (!mChunks.IsEmpty() && mChunks[mChunks.Length() - 1].IsNull()) {
@@ -413,22 +409,16 @@ public:
     return nullptr;
   }
 
   void RemoveLeading(StreamTime aDuration)
   {
     RemoveLeading(aDuration, 0);
   }
 
-#ifdef MOZILLA_INTERNAL_API
-  void GetStartTime(TimeStamp &aTime) {
-    aTime = mChunks[0].mTimeStamp;
-  }
-#endif
-
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
   {
     size_t amount = mChunks.ShallowSizeOfExcludingThis(aMallocSizeOf);
     for (size_t i = 0; i < mChunks.Length(); i++) {
       amount += mChunks[i].SizeOfExcludingThisIfUnshared(aMallocSizeOf);
     }
     return amount;
   }
@@ -450,19 +440,16 @@ protected:
   explicit MediaSegmentBase(Type aType)
     : MediaSegment(aType)
     , mChunks()
   {}
 
   MediaSegmentBase(MediaSegmentBase&& aSegment)
     : MediaSegment(std::move(aSegment))
     , mChunks()
-#ifdef MOZILLA_INTERNAL_API
-    , mTimeStamp(std::move(aSegment.mTimeStamp))
-#endif
   {
     mChunks.SwapElements(aSegment.mChunks);
     MOZ_ASSERT(mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
                "Capacity must be retained in self after swap");
     MOZ_ASSERT(aSegment.mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
                "Capacity must be retained in other after swap");
   }
 
@@ -571,16 +558,13 @@ protected:
       mChunks.RemoveElementsAt(i+1, mChunks.Length() - (i+1));
     }
     MOZ_ASSERT(mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
                "Capacity must be retained after removing chunks");
     // Caller must adjust mDuration
   }
 
   AutoTArray<Chunk, DEFAULT_SEGMENT_CAPACITY> mChunks;
-#ifdef MOZILLA_INTERNAL_API
-  mozilla::TimeStamp mTimeStamp;
-#endif
 };
 
 } // namespace mozilla
 
 #endif /* MOZILLA_MEDIASEGMENT_H_ */
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -778,21 +778,17 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
           ticksWritten += toWrite;
         }
         output.ApplyVolume(volume);
       }
       t = end;
     }
     audioOutput.mLastTickWritten = offset;
 
-    // Need unique id for stream & track - and we want it to match the inserter
-    output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
-                                     mMixer,
-                                     AudioOutputChannelCount(),
-                                     mSampleRate);
+    output.WriteTo(mMixer, AudioOutputChannelCount(), mSampleRate);
   }
   return ticksWritten;
 }
 
 void
 MediaStreamGraphImpl::OpenAudioInputImpl(CubebUtils::AudioDeviceID aID,
                                          AudioDataListener* aListener)
 {
@@ -3709,17 +3705,16 @@ MediaStreamGraphImpl::MediaStreamGraphIm
   , mEndTime(GRAPH_TIME_MAX)
   , mForceShutDown(false)
   , mPostedRunInStableStateEvent(false)
   , mDetectedNotRunning(false)
   , mPostedRunInStableState(false)
   , mRealtime(aDriverRequested != OFFLINE_THREAD_DRIVER)
   , mNonRealtimeProcessing(false)
   , mStreamOrderDirty(false)
-  , mLatencyLog(AsyncLatencyLogger::Get())
   , mAbstractMainThread(aMainThread)
   , mSelfRef(this)
   , mOutputChannels(std::min<uint32_t>(8, CubebUtils::MaxNumberOfChannels()))
   , mGlobalVolume(CubebUtils::GetVolumeScale())
 #ifdef DEBUG
   , mCanRunMessagesSynchronously(false)
 #endif
 {
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -5,17 +5,16 @@
 
 #ifndef MOZILLA_MEDIASTREAMGRAPHIMPL_H_
 #define MOZILLA_MEDIASTREAMGRAPHIMPL_H_
 
 #include "MediaStreamGraph.h"
 
 #include "AudioMixer.h"
 #include "GraphDriver.h"
-#include "Latency.h"
 #include "mozilla/Atomics.h"
 #include "mozilla/Monitor.h"
 #include "mozilla/Services.h"
 #include "mozilla/TimeStamp.h"
 #include "mozilla/UniquePtr.h"
 #include "mozilla/WeakPtr.h"
 #include "nsClassHashtable.h"
 #include "nsIMemoryReporter.h"
@@ -881,20 +880,16 @@ public:
    * value is only accessed on the main thread.
    */
   bool mNonRealtimeProcessing;
   /**
    * True when a change has happened which requires us to recompute the stream
    * blocking order.
    */
   bool mStreamOrderDirty;
-  /**
-   * Hold a ref to the Latency logger
-   */
-  RefPtr<AsyncLatencyLogger> mLatencyLog;
   AudioMixer mMixer;
   const RefPtr<AbstractThread> mAbstractMainThread;
 
   // used to limit graph shutdown time
   // Only accessed on the main thread.
   nsCOMPtr<nsITimer> mShutdownTimer;
 
 private:
--- a/dom/media/moz.build
+++ b/dom/media/moz.build
@@ -113,17 +113,16 @@ EXPORTS += [
     'ChannelMediaDecoder.h',
     'CubebUtils.h',
     'DecoderTraits.h',
     'DOMMediaStream.h',
     'FileBlockCache.h',
     'FrameStatistics.h',
     'ImageToI420.h',
     'Intervals.h',
-    'Latency.h',
     'MediaCache.h',
     'MediaContainerType.h',
     'MediaData.h',
     'MediaDataDemuxer.h',
     'MediaDecoder.h',
     'MediaDecoderOwner.h',
     'MediaDecoderStateMachine.h',
     'MediaEventSource.h',
@@ -233,17 +232,16 @@ UNIFIED_SOURCES += [
     'ChannelMediaResource.cpp',
     'CloneableWithRangeMediaResource.cpp',
     'DOMMediaStream.cpp',
     'FileBlockCache.cpp',
     'FileMediaResource.cpp',
     'GetUserMediaRequest.cpp',
     'GraphDriver.cpp',
     'ImageToI420.cpp',
-    'Latency.cpp',
     'MediaCache.cpp',
     'MediaContainerType.cpp',
     'MediaData.cpp',
     'MediaDecoder.cpp',
     'MediaDecoderStateMachine.cpp',
     'MediaDeviceInfo.cpp',
     'MediaDevices.cpp',
     'MediaFormatReader.cpp',
deleted file mode 100644
--- a/dom/media/test/graph_latency.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python
-# graph_latency.py - graph media latency
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# needs matplotlib (sudo aptitude install python-matplotlib)
-
-import matplotlib.pyplot as plt
-from matplotlib import rc
-import sys
-from pprint import pprint
-import re
-
-
-# FIX!  needs to be sum of a single mediastreamtrack and any output overhead for it
-# So there is one sum per MST
-def compute_sum(data):
-    'Compute the sum for each timestamp. This expects the output of parse_data.'
-    last_values = {}
-    out = ([],[])
-
-    for i in data:
-        if i[0] not in last_values.keys():
-          last_values[i[0]] = 0
-        last_values[i[0]] = float(i[3])
-        print last_values
-        out[0].append(i[2])
-        out[1].append(sum(last_values.values()))
-    return out
-
-
-def clean_data(raw_data):
-    '''
-    Remove the PR_LOG cruft at the beginning of each line and returns a list of
-    tuple.
-    '''
-    out = []
-    for line in raw_data:
-        match = re.match(r'(.*)#(.*)', line)
-        if match:
-	    continue
-	else:
-            out.append(line.split(": ")[1])
-    return out
-
-# returns a list of tuples
-def parse_data(raw_lines):
-    '''
-    Split each line by , and put every bit in a tuple.
-    '''
-    out = []
-    for line in raw_lines:
-        out.append(line.split(','))
-    return out
-
-if len(sys.argv) == 3:
-    name = sys.argv[1]
-    channels = int(sys.argv[2])
-else:
-    print sys.argv[0] + "latency_log"
-
-try:
-    f = open(sys.argv[1])
-except:
-    print "cannot open " + name
-
-raw_lines = f.readlines()
-lines = clean_data(raw_lines)
-data = parse_data(lines)
-
-final_data = {}
-
-for tupl in data:
-    name = tupl[0]
-    if tupl[1] != 0:
-        name = name+tupl[1]
-    if name not in final_data.keys():
-        final_data[name] = ([], [])
-# sanity-check values
-    if float(tupl[3]) < 10*1000:
-        final_data[name][0].append(float(tupl[2]))
-        final_data[name][1].append(float(tupl[3]))
-
-#overall = compute_sum(data)
-#final_data["overall"] = overall
-
-pprint(final_data)
-
-fig = plt.figure()
-for i in final_data.keys():
-    plt.plot(final_data[i][0], final_data[i][1], label=i)
-
-plt.legend()
-plt.suptitle("Latency in ms (y-axis) against time in ms (x-axis).")
-
-size = fig.get_size_inches()
-# make it gigantic so we can see things. sometimes, if the graph is too big,
-# this errors. reduce the factor so it stays under 2**15.
-fig.set_size_inches((size[0]*10, size[1]*2))
-name = sys.argv[1][:-4] + ".pdf"
-fig.savefig(name)
-
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -45,21 +45,16 @@ namespace mozilla {
 #ifdef LOG
 #undef LOG
 #endif
 
 LogModule* GetMediaManagerLog();
 #define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
 #define LOG_FRAMES(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
 
-LogModule* AudioLogModule() {
-  static mozilla::LazyLogModule log("AudioLatency");
-  return static_cast<LogModule*>(log);
-}
-
 void
 WebRTCAudioDataListener::NotifyOutputData(MediaStreamGraphImpl* aGraph,
                                           AudioDataValue* aBuffer,
                                           size_t aFrames,
                                           TrackRate aRate,
                                           uint32_t aChannels)
 {
   MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
@@ -139,18 +134,16 @@ MediaEngineWebRTCMicrophoneSource::Media
   , mSettings(
       new nsMainThreadPtrHolder<media::Refcountable<dom::MediaTrackSettings>>(
         "MediaEngineWebRTCMicrophoneSource::mSettings",
         new media::Refcountable<dom::MediaTrackSettings>(),
         // Non-strict means it won't assert main thread for us.
         // It would be great if it did but we're already on the media thread.
         /* aStrict = */ false))
   , mRequestedInputChannelCount(aMaxChannelCount)
-  , mTotalFrames(0)
-  , mLastLogFrames(0)
   , mSkipProcessing(false)
   , mInputDownmixBuffer(MAX_SAMPLING_FREQ * MAX_CHANNELS / 100)
 {
 #ifndef ANDROID
   MOZ_ASSERT(mDeviceInfo->DeviceID());
 #endif
 
   // We'll init lazily as needed
@@ -753,19 +746,16 @@ MediaEngineWebRTCMicrophoneSource::Start
 #endif
     mAllocation->mLiveFramesAppended = false;
     mAllocation->mLiveSilenceAppended = false;
 
     if (!mListener) {
       mListener = new WebRTCAudioDataListener(this);
     }
 
-    // Make sure logger starts before capture
-    AsyncLatencyLogger::Get(true);
-
     mAllocation->mStream->OpenAudioInput(deviceID, mListener);
 
     MOZ_ASSERT(mState != kReleased);
     mState = kStarted;
   }
 
   ApplySettings(mNetPrefs, mAllocation->mStream->GraphImpl());
 
@@ -1126,28 +1116,16 @@ MediaEngineWebRTCMicrophoneSource::Inser
                                                  uint32_t aChannels)
 {
   MutexAutoLock lock(mMutex);
 
   if (mState != kStarted) {
     return;
   }
 
-  if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
-    mTotalFrames += aFrames;
-    if (mAllocation->mStream &&
-        mTotalFrames > mLastLogFrames +
-                       mAllocation->mStream->GraphRate()) { // ~ 1 second
-      MOZ_LOG(AudioLogModule(), LogLevel::Debug,
-              ("%p: Inserting %zu samples into graph, total frames = %" PRIu64,
-               (void*)this, aFrames, mTotalFrames));
-      mLastLogFrames = mTotalFrames;
-    }
-  }
-
   if (!mAllocation->mStream) {
     return;
   }
 
   if (!mAllocation->mStream->GraphImpl()) {
     // The DOMMediaStream that owns mAllocation->mStream has been cleaned up
     // and MediaStream::DestroyImpl() has run in the MSG. This is fine and
     // can happen before the MediaManager thread gets to stop capture for
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.h
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.h
@@ -274,18 +274,16 @@ private:
   // The current settings for the underlying device.
   // Member access is main thread only after construction.
   const nsMainThreadPtrHandle<media::Refcountable<dom::MediaTrackSettings>> mSettings;
 
   // The number of channels asked for by content, after clamping to the range of
   // legal channel count for this particular device. This is the number of
   // channels of the input buffer passed as parameter in NotifyInputData.
   uint32_t mRequestedInputChannelCount;
-  uint64_t mTotalFrames;
-  uint64_t mLastLogFrames;
 
   // mSkipProcessing is true if none of the processing passes are enabled,
   // because of prefs or constraints. This allows simply copying the audio into
   // the MSG, skipping resampling and the whole webrtc.org code.
   // This is read and written to only on the MSG thread.
   bool mSkipProcessing;
 
   // To only update microphone when needed, we keep track of the prefs
--- a/layout/build/nsLayoutStatics.cpp
+++ b/layout/build/nsLayoutStatics.cpp
@@ -74,17 +74,16 @@
 #include "nsXULContentUtils.h"
 #include "nsXULPrototypeCache.h"
 #include "nsXULTooltipListener.h"
 
 #include "nsMenuBarListener.h"
 #endif
 
 #include "CubebUtils.h"
-#include "Latency.h"
 #include "WebAudioUtils.h"
 
 #include "nsError.h"
 
 #include "nsJSEnvironment.h"
 #include "nsContentSink.h"
 #include "nsFrameMessageManager.h"
 #include "nsDOMMutationObserver.h"
@@ -216,17 +215,16 @@ nsLayoutStatics::Initialize()
 #endif
 
   rv = nsFocusManager::Init();
   if (NS_FAILED(rv)) {
     NS_ERROR("Could not initialize nsFocusManager");
     return rv;
   }
 
-  AsyncLatencyLogger::InitializeStatics();
   DecoderDoctorLogger::Init();
   MediaManager::StartupInit();
   CubebUtils::InitLibrary();
 
   nsContentSink::InitializeStatics();
   nsHtml5Module::InitializeStatics();
   mozilla::dom::FallbackEncoding::Initialize();
   nsLayoutUtils::Initialize();
@@ -354,17 +352,16 @@ nsLayoutStatics::Shutdown()
   nsGlobalWindowInner::ShutDown();
   nsGlobalWindowOuter::ShutDown();
   WebIDLGlobalNameHash::Shutdown();
   nsListControlFrame::Shutdown();
   nsXBLService::Shutdown();
   FrameLayerBuilder::Shutdown();
 
   CubebUtils::ShutdownLibrary();
-  AsyncLatencyLogger::ShutdownLogger();
   WebAudioUtils::Shutdown();
 
   nsCORSListenerProxy::Shutdown();
 
   PointerEventHandler::ReleaseStatics();
 
   TouchManager::ReleaseStatics();
 
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -13,17 +13,16 @@
 
 #include "AudioConduit.h"
 #include "nsCOMPtr.h"
 #include "mozilla/Services.h"
 #include "nsServiceManagerUtils.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
 #include "nsThreadUtils.h"
-#include "Latency.h"
 #include "mozilla/Telemetry.h"
 
 #include "webrtc/modules/audio_processing/include/audio_processing.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
 #include "webrtc/voice_engine/include/voe_errors.h"
 #include "webrtc/voice_engine/voice_engine_impl.h"
 #include "webrtc/system_wrappers/include/clock.h"
 
@@ -700,20 +699,16 @@ WebrtcAudioConduit::SendAudioFrame(const
 
   // if transmission is not started .. conduit cannot insert frames
   if(!mEngineTransmitting)
   {
     CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
-  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-    struct Processing insert = { TimeStamp::Now(), 0 };
-    mProcessing.AppendElement(insert);
-  }
 
   capture_delay = mCaptureDelay;
   // Insert the samples
   mPtrVoEBase->audio_transport()->PushCaptureData(mChannel, audio_data,
                                                   sizeof(audio_data[0])*8, // bits
                                                   samplingFreqHz,
                                                   channels,
                                                   lengthSamples);
@@ -802,58 +797,29 @@ WebrtcAudioConduit::GetAudioFrame(int16_
                   "A/V sync: sync delta: %dms, audio jitter delay %dms, playout delay %dms",
                   avsync_offset_ms, jitter_buffer_delay_ms, playout_buffer_delay_ms);
     } else {
       CSFLogError(LOGTAG, "A/V sync: GetAVStats failed");
     }
     mLastSyncLog = mSamples;
   }
 
-  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-    if (mProcessing.Length() > 0) {
-      unsigned int now;
-      mPtrVoEVideoSync->GetPlayoutTimestamp(mChannel, now);
-      if (static_cast<uint32_t>(now) != mLastTimestamp) {
-        mLastTimestamp = static_cast<uint32_t>(now);
-        // Find the block that includes this timestamp in the network input
-        while (mProcessing.Length() > 0) {
-          // FIX! assumes 20ms @ 48000Hz
-          // FIX handle wrap-around
-          if (mProcessing[0].mRTPTimeStamp + 20*(48000/1000) >= now) {
-            TimeDuration t = TimeStamp::Now() - mProcessing[0].mTimeStamp;
-            // Wrap-around?
-            int64_t delta = t.ToMilliseconds() + (now - mProcessing[0].mRTPTimeStamp)/(48000/1000);
-            LogTime(AsyncLatencyLogger::AudioRecvRTP, ((uint64_t) this), delta);
-            break;
-          }
-          mProcessing.RemoveElementAt(0);
-        }
-      }
-    }
-  }
   CSFLogDebug(LOGTAG,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
                                                                lengthSamples);
   return kMediaConduitNoError;
 }
 
 // Transport Layer Callbacks
 MediaConduitErrorCode
 WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
 {
   CSFLogDebug(LOGTAG,  "%s : channel %d", __FUNCTION__, mChannel);
 
   if(mEngineReceiving)
   {
-    if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-      // timestamp is at 32 bits in ([1])
-      struct Processing insert = { TimeStamp::Now(),
-                                   ntohl(static_cast<const uint32_t *>(data)[1]) };
-      mProcessing.AppendElement(insert);
-    }
-
     // XXX we need to get passed the time the packet was received
     if(mPtrVoENetwork->ReceivedRTPPacket(mChannel, data, len) == -1)
     {
       int error = mPtrVoEBase->LastError();
       CSFLogError(LOGTAG, "%s RTP Processing Error %d", __FUNCTION__, error);
       if(error == VE_RTP_RTCP_MODULE_ERROR)
       {
         return kMediaConduitRTPRTCPModuleError;
@@ -982,26 +948,16 @@ WebrtcAudioConduit::StartReceiving()
 // Called on AudioGUM or MSG thread
 bool
 WebrtcAudioConduit::SendRtp(const uint8_t* data,
                             size_t len,
                             const webrtc::PacketOptions& options)
 {
   CSFLogDebug(LOGTAG,  "%s: len %lu", __FUNCTION__, (unsigned long)len);
 
-  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-    if (mProcessing.Length() > 0) {
-      TimeStamp started = mProcessing[0].mTimeStamp;
-      mProcessing.RemoveElementAt(0);
-      mProcessing.RemoveElementAt(0); // 20ms packetization!  Could automate this by watching sizes
-      TimeDuration t = TimeStamp::Now() - started;
-      int64_t delta = t.ToMilliseconds();
-      LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta);
-    }
-  }
   ReentrantMonitorAutoEnter enter(mTransportMonitor);
   // XXX(pkerr) - the PacketOptions are being ignored. This parameter was added along
   // with the Call API update in the webrtc.org codebase.
   // The only field in it is the packet_id, which is used when the header
   // extension for TransportSequenceNumber is being used, which we don't.
   (void)options;
   if(mTransmitterTransport &&
      (mTransmitterTransport->SendRtpPacket(data, len) == NS_OK))
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -180,17 +180,16 @@ public:
                       mTransmitterTransport(nullptr),
                       mReceiverTransport(nullptr),
                       mEngineTransmitting(false),
                       mEngineReceiving(false),
                       mChannel(-1),
                       mDtmfEnabled(false),
                       mCodecMutex("AudioConduit codec db"),
                       mCaptureDelay(150),
-                      mLastTimestamp(0),
                       mSamples(0),
                       mLastSyncLog(0)
   {
   }
 
   virtual ~WebrtcAudioConduit();
 
   MediaConduitErrorCode Init();
@@ -334,18 +333,16 @@ private:
   RecvCodecList    mRecvCodecList;
 
   Mutex mCodecMutex; // protects mCurSendCodecConfig
   nsAutoPtr<AudioCodecConfig> mCurSendCodecConfig;
 
   // Current "capture" delay (really output plus input delay)
   int32_t mCaptureDelay;
 
-  uint32_t mLastTimestamp;
-
   webrtc::AudioFrame mAudioFrame; // for output pulls
 
   uint32_t mSamples;
   uint32_t mLastSyncLog;
 
   RtpSourceObserver mRtpSourceObserver;
 };
 
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -77,18 +77,16 @@ static_assert((WEBRTC_MAX_SAMPLE_RATE / 
 using namespace mozilla;
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
 using namespace mozilla::layers;
 
 mozilla::LazyLogModule gMediaPipelineLog("MediaPipeline");
 
 namespace mozilla {
-extern mozilla::LogModule*
-AudioLogModule();
 
 class VideoConverterListener
 {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoConverterListener)
 
   virtual void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) = 0;
 
@@ -1928,17 +1926,16 @@ public:
     // If the sampling rate is not-supported, we will use 48kHz instead.
     , mRate(static_cast<AudioSessionConduit*>(mConduit.get())
                 ->IsSamplingFreqSupported(mSource->GraphRate())
               ? mSource->GraphRate()
               : WEBRTC_MAX_SAMPLE_RATE)
     , mTaskQueue(
         new TaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
                       "AudioPipelineListener"))
-    , mLastLog(0)
   {
     AddTrackToSource(mRate);
   }
 
   // Implement MediaStreamListener
   void NotifyPull(MediaStreamGraph* aGraph,
                   StreamTime aDesiredTime) override
   {
@@ -2026,42 +2023,28 @@ private:
 
       segment.AppendFrames(
         samples.forget(), outputChannels, frames, mPrincipalHandle);
 
       // Handle track not actually added yet or removed/finished
       if (mSource->AppendToTrack(mTrackId, &segment)) {
         framesNeeded -= frames;
         mPlayedTicks += frames;
-        if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
-          if (mPlayedTicks > mLastLog + mRate) {
-            MOZ_LOG(AudioLogModule(),
-                    LogLevel::Debug,
-                    ("%p: Inserting samples into track %d, total = "
-                     "%" PRIu64,
-                     (void*)this,
-                     mTrackId,
-                     mPlayedTicks));
-            mLastLog = mPlayedTicks;
-          }
-        }
       } else {
         MOZ_LOG(gMediaPipelineLog, LogLevel::Error, ("AppendToTrack failed"));
         // we can't un-read the data, but that's ok since we don't want to
         // buffer - but don't i-loop!
         break;
       }
     }
   }
 
   RefPtr<MediaSessionConduit> mConduit;
   const TrackRate mRate;
   const RefPtr<TaskQueue> mTaskQueue;
-  // Graph's current sampling rate
-  TrackTicks mLastLog = 0; // mPlayedTicks when we last logged
 };
 
 MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(
   const std::string& aPc,
   nsCOMPtr<nsIEventTarget> aMainThread,
   nsCOMPtr<nsIEventTarget> aStsThread,
   RefPtr<AudioSessionConduit> aConduit,
   dom::MediaStreamTrack* aTrack)
--- a/media/webrtc/signaling/test/FakeMediaStreamsImpl.h
+++ b/media/webrtc/signaling/test/FakeMediaStreamsImpl.h
@@ -5,19 +5,16 @@
 #ifndef FAKE_MEDIA_STREAMIMPL_H_
 #define FAKE_MEDIA_STREAMIMPL_H_
 
 #include "FakeMediaStreams.h"
 
 #include "nspr.h"
 #include "nsError.h"
 
-void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t b, int64_t c) {}
-void LogLatency(AsyncLatencyLogger::LatencyLogIndex index, uint64_t b, int64_t c) {}
-
 static const int AUDIO_BUFFER_SIZE = 1600;
 static const int NUM_CHANNELS      = 2;
 static const int GRAPH_RATE        = 16000;
 
 NS_IMPL_ISUPPORTS0(Fake_DOMMediaStream)
 
 // Fake_MediaStream
 double Fake_MediaStream::StreamTimeToSeconds(mozilla::StreamTime aTime) {