Bug 964127: Add logging of webrtc a/v sync status r=jib
author Randell Jesup <rjesup@jesup.org>
Wed, 12 Mar 2014 20:11:49 -0400
changeset 173299 0d44ffa10368dcf7f8fd11c0b3d78e918066b119
parent 173298 8ddc66a8d92975bc841b5225a7962fa8b1224f15
child 173300 8876cd22695be7bb62e9ba672bbe3a6bba6a714f
push id 26399
push user cbook@mozilla.com
push date Thu, 13 Mar 2014 11:51:43 +0000
treeherder mozilla-central@fdbf79a891de [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers jib
bugs 964127
milestone 30.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 964127: Add logging of webrtc a/v sync status r=jib
media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.h
media/webrtc/trunk/webrtc/video_engine/vie_sync_module.cc
media/webrtc/trunk/webrtc/voice_engine/channel.cc
media/webrtc/trunk/webrtc/voice_engine/channel.h
media/webrtc/trunk/webrtc/voice_engine/include/voe_video_sync.h
media/webrtc/trunk/webrtc/voice_engine/test/auto_test/standard/video_sync_test.cc
media/webrtc/trunk/webrtc/voice_engine/voe_video_sync_impl.cc
media/webrtc/trunk/webrtc/voice_engine/voe_video_sync_impl.h
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -682,16 +682,32 @@ WebrtcAudioConduit::GetAudioFrame(int16_
     CSFLogError(logTag,  "%s Getting audio data Failed %d", __FUNCTION__, error);
     if(error == VE_RUNTIME_PLAY_ERROR)
     {
       return kMediaConduitPlayoutError;
     }
     return kMediaConduitUnknownError;
   }
 
+  // Not #ifdef DEBUG or on a log module so we can use it for about:webrtc/etc
+  mSamples += lengthSamples;
+  if (mSamples >= mLastSyncLog + samplingFreqHz) {
+    int jitter_buffer_delay_ms = 0;
+    int playout_buffer_delay_ms = 0;
+    int avsync_offset_ms = 0;
+    mPtrVoEVideoSync->GetDelayEstimate(mChannel,
+                                       &jitter_buffer_delay_ms,
+                                       &playout_buffer_delay_ms,
+                                       &avsync_offset_ms); // ignore errors
+    CSFLogError(logTag,
+                "A/V sync: sync delta: %dms, audio jitter delay %dms, playout delay %dms",
+                avsync_offset_ms, jitter_buffer_delay_ms, playout_buffer_delay_ms);
+    mLastSyncLog = mSamples;
+  }
+
 #ifdef MOZILLA_INTERNAL_API
   if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) {
     if (mProcessing.Length() > 0) {
       unsigned int now;
       mPtrVoEVideoSync->GetPlayoutTimestamp(mChannel, now);
       if (static_cast<uint32_t>(now) != mLastTimestamp) {
         mLastTimestamp = static_cast<uint32_t>(now);
         // Find the block that includes this timestamp in the network input
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -158,20 +158,22 @@ public:
                       mVoiceEngine(nullptr),
                       mTransport(nullptr),
                       mEngineTransmitting(false),
                       mEngineReceiving(false),
                       mChannel(-1),
                       mCurSendCodecConfig(nullptr),
                       mCaptureDelay(150),
                       mEchoOn(true),
-                      mEchoCancel(webrtc::kEcAec)
+                      mEchoCancel(webrtc::kEcAec),
 #ifdef MOZILLA_INTERNAL_API
-                      , mLastTimestamp(0)
+                      mLastTimestamp(0),
 #endif // MOZILLA_INTERNAL_API
+                      mSamples(0),
+                      mLastSyncLog(0)
   {
   }
 
   virtual ~WebrtcAudioConduit();
 
   MediaConduitErrorCode Init(WebrtcAudioConduit *other);
 
   int GetChannel() { return mChannel; }
@@ -259,13 +261,16 @@ private:
   int32_t mCaptureDelay;
 
   bool mEchoOn;
   webrtc::EcModes  mEchoCancel;
 
 #ifdef MOZILLA_INTERNAL_API
   uint32_t mLastTimestamp;
 #endif // MOZILLA_INTERNAL_API
+
+  uint32_t mSamples;
+  uint32_t mLastSyncLog;
 };
 
 } // end namespace
 
 #endif
--- a/media/webrtc/trunk/webrtc/video_engine/vie_sync_module.cc
+++ b/media/webrtc/trunk/webrtc/video_engine/vie_sync_module.cc
@@ -116,19 +116,21 @@ int32_t ViESyncModule::Process() {
   if (voe_channel_id_ == -1) {
     return 0;
   }
   assert(video_rtp_rtcp_ && voe_sync_interface_);
   assert(sync_.get());
 
   int audio_jitter_buffer_delay_ms = 0;
   int playout_buffer_delay_ms = 0;
+  int avsync_offset_ms = 0;
   if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
                                             &audio_jitter_buffer_delay_ms,
-                                            &playout_buffer_delay_ms) != 0) {
+                                            &playout_buffer_delay_ms,
+                                            &avsync_offset_ms) != 0) {
     // Could not get VoE delay value, probably not a valid channel Id or
     // the channel have not received enough packets.
     WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, vie_channel_->Id(),
                  "%s: VE_GetDelayEstimate error for voice_channel %d",
                  __FUNCTION__, voe_channel_id_);
     return 0;
   }
   const int current_audio_delay_ms = audio_jitter_buffer_delay_ms +
@@ -149,21 +151,25 @@ int32_t ViESyncModule::Process() {
   }
 
   if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp,
                          *voice_receiver) != 0) {
     return 0;
   }
 
   int relative_delay_ms;
+  int result;
   // Calculate how much later or earlier the audio stream is compared to video.
-  if (!sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_,
-                                   &relative_delay_ms)) {
+
+  result = sync_->ComputeRelativeDelay(audio_measurement_, video_measurement_,
+                                       &relative_delay_ms);
+  if (!result) {
     return 0;
   }
+  voe_sync_interface_->SetCurrentSyncOffset(voe_channel_id_, relative_delay_ms);
 
   TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", current_video_delay_ms);
   TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", current_audio_delay_ms);
   TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms);
   int target_audio_delay_ms = 0;
   int target_video_delay_ms = current_video_delay_ms;
   // Calculate the necessary extra audio delay and desired total video
   // delay to get the streams in sync.
--- a/media/webrtc/trunk/webrtc/voice_engine/channel.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/channel.cc
@@ -998,16 +998,17 @@ Channel::Channel(int32_t channelId,
     _connectionObserverPtr(NULL),
     _countAliveDetections(0),
     _countDeadDetections(0),
     _outputSpeechType(AudioFrame::kNormalSpeech),
     _average_jitter_buffer_delay_us(0),
     least_required_delay_ms_(0),
     _previousTimestamp(0),
     _recPacketDelayMs(20),
+    _current_sync_offset(0),
     _RxVadDetection(false),
     _rxApmIsEnabled(false),
     _rxAgcIsEnabled(false),
     _rxNsIsEnabled(false),
     restored_packet_in_use_(false)
 {
     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
                  "Channel::Channel() - ctor");
@@ -2190,16 +2191,17 @@ int32_t Channel::ReceivedRTPPacket(const
     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                  "Incoming packet: invalid RTP header");
     return -1;
   }
   header.payload_type_frequency =
       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
   if (header.payload_type_frequency < 0)
     return -1;
+  // MUST call before IncomingPacket() or will always return false
   bool in_order = IsPacketInOrder(header);
   rtp_receive_statistics_->IncomingPacket(header, length,
       IsPacketRetransmitted(header, in_order));
   rtp_payload_registry_->SetIncomingPayloadType(header);
   return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
 }
 
 bool Channel::ReceivePacket(const uint8_t* packet,
@@ -4714,25 +4716,27 @@ Channel::GetNetworkStatistics(NetworkSta
     int return_value = audio_coding_->NetworkStatistics(&acm_stats);
     if (return_value >= 0) {
       memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
     }
     return return_value;
 }
 
 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
-                               int* playout_buffer_delay_ms) const {
+                               int* playout_buffer_delay_ms,
+                               int* avsync_offset_ms) const {
   if (_average_jitter_buffer_delay_us == 0) {
     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                  "Channel::GetDelayEstimate() no valid estimate.");
     return false;
   }
   *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
       _recPacketDelayMs;
   *playout_buffer_delay_ms = playout_delay_ms_;
+  *avsync_offset_ms = _current_sync_offset;
   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                "Channel::GetDelayEstimate()");
   return true;
 }
 
 int Channel::SetInitialPlayoutDelay(int delay_ms)
 {
   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
--- a/media/webrtc/trunk/webrtc/voice_engine/channel.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/channel.h
@@ -199,20 +199,22 @@ public:
     int GetRoundTripTimeSummary(StatVal& delaysMs) const;
     int GetDeadOrAliveCounters(int& countDead, int& countAlive) const;
 
     // VoENetEqStats
     int GetNetworkStatistics(NetworkStatistics& stats);
 
     // VoEVideoSync
     bool GetDelayEstimate(int* jitter_buffer_delay_ms,
-                          int* playout_buffer_delay_ms) const;
+                          int* playout_buffer_delay_ms,
+                          int* avsync_offset_ms) const;
     int least_required_delay_ms() const { return least_required_delay_ms_; }
     int SetInitialPlayoutDelay(int delay_ms);
     int SetMinimumPlayoutDelay(int delayMs);
+    void SetCurrentSyncOffset(int offsetMs) { _current_sync_offset = offsetMs; }
     int GetPlayoutTimestamp(unsigned int& timestamp);
     void UpdatePlayoutTimestamp(bool rtcp);
     int SetInitTimestamp(unsigned int timestamp);
     int SetInitSequenceNumber(short sequenceNumber);
 
     // VoEVideoSyncExtended
     int GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const;
 
@@ -555,16 +557,17 @@ private:
     uint32_t _countAliveDetections;
     uint32_t _countDeadDetections;
     AudioFrame::SpeechType _outputSpeechType;
     // VoEVideoSync
     uint32_t _average_jitter_buffer_delay_us;
     int least_required_delay_ms_;
     uint32_t _previousTimestamp;
     uint16_t _recPacketDelayMs;
+    int _current_sync_offset;
     // VoEAudioProcessing
     bool _RxVadDetection;
     bool _rxApmIsEnabled;
     bool _rxAgcIsEnabled;
     bool _rxNsIsEnabled;
     bool restored_packet_in_use_;
 };
 
--- a/media/webrtc/trunk/webrtc/voice_engine/include/voe_video_sync.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/include/voe_video_sync.h
@@ -60,28 +60,34 @@ public:
 
     // Sets a minimum target delay for the jitter buffer. This delay is
     // maintained by the jitter buffer, unless channel condition (jitter in
     // inter-arrival times) dictates a higher required delay. The overall
     // jitter buffer delay is max of |delay_ms| and the latency that NetEq
     // computes based on inter-arrival times and its playout mode.
     virtual int SetMinimumPlayoutDelay(int channel, int delay_ms) = 0;
 
+    // Sets the current a/v delay in ms (negative is video leading) if known,
+    // otherwise 0.
+    virtual int SetCurrentSyncOffset(int channel, int offset_ms) = 0;
+
     // Sets an initial delay for the playout jitter buffer. The playout of the
     // audio is delayed by |delay_ms| in milliseconds. Thereafter, the delay is
     // maintained, unless NetEq's internal mechanism requires a higher latency.
     // Such a latency is computed based on inter-arrival times and NetEq's
     // playout mode.
     virtual int SetInitialPlayoutDelay(int channel, int delay_ms) = 0;
 
-    // Gets the |jitter_buffer_delay_ms| (including the algorithmic delay), and
-    // the |playout_buffer_delay_ms| for a specified |channel|.
+    // Gets the |jitter_buffer_delay_ms| (including the algorithmic delay),
+    // the |playout_buffer_delay_ms| and |avsync_offset_ms| for a specified
+    // |channel|.
     virtual int GetDelayEstimate(int channel,
                                  int* jitter_buffer_delay_ms,
-                                 int* playout_buffer_delay_ms) = 0;
+                                 int* playout_buffer_delay_ms,
+                                 int* avsync_offset_ms) = 0;
 
     // Returns the least required jitter buffer delay. This is computed by the
     // the jitter buffer based on the inter-arrival time of RTP packets and
     // playout mode. NetEq maintains this latency unless a higher value is
     // requested by calling SetMinimumPlayoutDelay().
     virtual int GetLeastRequiredDelayMs(int channel) const = 0;
 
     // Manual initialization of the RTP timestamp.
--- a/media/webrtc/trunk/webrtc/voice_engine/test/auto_test/standard/video_sync_test.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/test/auto_test/standard/video_sync_test.cc
@@ -29,19 +29,21 @@ class VideoSyncTest : public AfterStream
   // All samples are checked so they are greater than |min_estimate|.
   int CollectEstimatesDuring15Seconds(int min_estimate) {
     Sleep(1000);
 
     std::vector<int> all_delay_estimates;
     for (int second = 0; second < 15; second++) {
       int jitter_buffer_delay_ms = 0;
       int playout_buffer_delay_ms = 0;
+      int avsync_offset_ms = 0;
       EXPECT_EQ(0, voe_vsync_->GetDelayEstimate(channel_,
                                                 &jitter_buffer_delay_ms,
-                                                &playout_buffer_delay_ms));
+                                                &playout_buffer_delay_ms,
+                                                &avsync_offset_ms));
 
       EXPECT_GT(jitter_buffer_delay_ms, min_estimate) <<
           "The delay estimate can not conceivably get lower than " <<
           min_estimate << " ms, it's unrealistic.";
 
       all_delay_estimates.push_back(jitter_buffer_delay_ms);
       Sleep(1000);
     }
--- a/media/webrtc/trunk/webrtc/voice_engine/voe_video_sync_impl.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/voe_video_sync_impl.cc
@@ -135,16 +135,33 @@ int VoEVideoSyncImpl::SetMinimumPlayoutD
     {
         _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
             "SetMinimumPlayoutDelay() failed to locate channel");
         return -1;
     }
     return channelPtr->SetMinimumPlayoutDelay(delayMs);
 }
 
+int VoEVideoSyncImpl::SetCurrentSyncOffset(int channel, int offsetMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetCurrentSyncOffset(channel=%d, offsetMs=%d)",
+                 channel, offsetMs);
+    voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
+    voe::Channel* channelPtr = ch.channel();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetCurrentSyncOffset() failed to locate channel");
+        return -1;
+    }
+    channelPtr->SetCurrentSyncOffset(offsetMs);
+    return 0;
+}
+
 int VoEVideoSyncImpl::SetInitialPlayoutDelay(int channel, int delay_ms)
 {
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                  "SetInitialPlayoutDelay(channel=%d, delay_ms=%d)",
                  channel, delay_ms);
     IPHONE_NOT_SUPPORTED(_shared->statistics());
 
     if (!_shared->statistics().Initialized())
@@ -160,34 +177,36 @@ int VoEVideoSyncImpl::SetInitialPlayoutD
             "SetInitialPlayoutDelay() failed to locate channel");
         return -1;
     }
     return channelPtr->SetInitialPlayoutDelay(delay_ms);
 }
 
 int VoEVideoSyncImpl::GetDelayEstimate(int channel,
                                        int* jitter_buffer_delay_ms,
-                                       int* playout_buffer_delay_ms) {
+                                       int* playout_buffer_delay_ms,
+                                       int* avsync_offset_ms) {
   WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                "GetDelayEstimate(channel=%d, delayMs=?)", channel);
   IPHONE_NOT_SUPPORTED(_shared->statistics());
 
   if (!_shared->statistics().Initialized()) {
     _shared->SetLastError(VE_NOT_INITED, kTraceError);
     return -1;
   }
   voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
   voe::Channel* channelPtr = ch.channel();
   if (channelPtr == NULL) {
     _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
                           "GetDelayEstimate() failed to locate channel");
     return -1;
   }
   if (!channelPtr->GetDelayEstimate(jitter_buffer_delay_ms,
-                                    playout_buffer_delay_ms)) {
+                                    playout_buffer_delay_ms,
+                                    avsync_offset_ms)) {
     return -1;
   }
   return 0;
 }
 
 int VoEVideoSyncImpl::GetPlayoutBufferSize(int& bufferMs)
 {
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
--- a/media/webrtc/trunk/webrtc/voice_engine/voe_video_sync_impl.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/voe_video_sync_impl.h
@@ -19,21 +19,24 @@ namespace webrtc {
 
 class VoEVideoSyncImpl : public VoEVideoSync
 {
 public:
     virtual int GetPlayoutBufferSize(int& bufferMs);
 
     virtual int SetMinimumPlayoutDelay(int channel, int delayMs);
 
+    virtual int SetCurrentSyncOffset(int channel, int offsetMs);
+
     virtual int SetInitialPlayoutDelay(int channel, int delay_ms);
 
     virtual int GetDelayEstimate(int channel,
                                  int* jitter_buffer_delay_ms,
-                                 int* playout_buffer_delay_ms);
+                                 int* playout_buffer_delay_ms,
+                                 int* avsync_offset_ms);
 
     virtual int GetLeastRequiredDelayMs(int channel) const;
 
     virtual int SetInitTimestamp(int channel, unsigned int timestamp);
 
     virtual int SetInitSequenceNumber(int channel, short sequenceNumber);
 
     virtual int GetPlayoutTimestamp(int channel, unsigned int& timestamp);