Bug 970691 - Part 1: Add timestamp to fake video. r=jesup
☠☠ backed out by d8014e46546e ☠ ☠
author Paul Kerr [:pkerr] <paulrkerr@gmail.com>
Sat, 22 Mar 2014 16:35:43 -0700
changeset 179830 94348d189ed5b93f8494f01467d18ca55262a9f3
parent 179829 03ad79f213ba29727044ecdd211db8d57185dff7
child 179831 83f7aec5a083650f8cba8701c577cfb6502fcfea
push id 26645
push user cbook@mozilla.com
push date Thu, 24 Apr 2014 09:38:53 +0000
treeherdermozilla-central@6965a913e858 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewers jesup
bugs 970691
milestone 31.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 970691 - Part 1: Add timestamp to fake video. r=jesup Update YuvStamper utility. Add a CRC32 to the encoded payload and have the decode method use this to verify reception. Wrap encoded values across multiple lines in the frame buffer when necessary. Use YuvStamper to encode a timestamp in each fake video frame. Extract the value in VideoConduit to calculate the video latency and add this to a running average latency when enabled via config.
content/media/webrtc/MediaEngineDefault.cpp
media/webrtc/moz.build
media/webrtc/signaling/signaling.gyp
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.h
media/webrtc/signaling/src/media-conduit/YuvStamper.cpp
media/webrtc/signaling/src/media-conduit/YuvStamper.h
--- a/content/media/webrtc/MediaEngineDefault.cpp
+++ b/content/media/webrtc/MediaEngineDefault.cpp
@@ -17,16 +17,18 @@
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "AndroidBridge.h"
 #include "nsISupportsUtils.h"
 #endif
 
+#include "YuvStamper.h"
+
 #define VIDEO_RATE USECS_PER_S
 #define AUDIO_RATE 16000
 #define AUDIO_FRAME_LENGTH ((AUDIO_RATE * MediaEngine::DEFAULT_AUDIO_TIMER_MS) / 1000)
 namespace mozilla {
 
 using namespace mozilla::gfx;
 
 NS_IMPL_ISUPPORTS1(MediaEngineDefaultVideoSource, nsITimerCallback)
@@ -234,16 +236,23 @@ MediaEngineDefaultVideoSource::Notify(ns
   }
 
   // Allocate a single solid color image
   nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
   nsRefPtr<layers::PlanarYCbCrImage> ycbcr_image =
       static_cast<layers::PlanarYCbCrImage*>(image.get());
   layers::PlanarYCbCrData data;
   AllocateSolidColorFrame(data, mOpts.mWidth, mOpts.mHeight, 0x80, mCb, mCr);
+
+ uint64_t timestamp = PR_Now();
+  YuvStamper::Encode(mOpts.mWidth, mOpts.mHeight, mOpts.mWidth,
+		     reinterpret_cast<uint8_t*>(data.mYChannel),
+		     reinterpret_cast<uint8_t*>(&timestamp), sizeof(timestamp),
+		     0, 0);
+
   ycbcr_image->SetData(data);
   // SetData copies data, so we can free the frame
   ReleaseFrame(data);
 
   MonitorAutoLock lock(mMonitor);
 
   // implicitly releases last image
   mImage = ycbcr_image.forget();
--- a/media/webrtc/moz.build
+++ b/media/webrtc/moz.build
@@ -209,8 +209,10 @@ if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'gonk
         GYP_DIRS += ['signalingtest']
         GYP_DIRS['signalingtest'].input = 'signaling/signaling.gyp'
         GYP_DIRS['signalingtest'].variables = gyp_vars.copy()
         GYP_DIRS['signalingtest'].variables.update(
             build_for_test=1
         )
         GYP_DIRS['signalingtest'].non_unified_sources += signaling_non_unified_sources
         GYP_DIRS['signalingtest'].non_unified_sources += signaling_non_unified_sources_2
+
+EXPORTS += ['signaling/src/media-conduit/YuvStamper.h']
--- a/media/webrtc/signaling/signaling.gyp
+++ b/media/webrtc/signaling/signaling.gyp
@@ -61,16 +61,17 @@
         '../trunk/webrtc/video_engine/include',
         '../trunk/webrtc/voice_engine/include',
         '../trunk/webrtc/modules/interface',
         '../trunk/webrtc/peerconnection',
         '../../libyuv/include',
         '../../../netwerk/srtp/src/include',
         '../../../netwerk/srtp/src/crypto/include',
         '../../../ipc/chromium/src',
+        '../../mtransport/third_party/nrappkit/src/util/libekr',
       ],
 
       #
       # DEPENDENCIES
       #
       'dependencies': [
       ],
 
@@ -82,16 +83,19 @@
       # SOURCES
       #
       'sources': [
         # Media Conduit
         './src/media-conduit/AudioConduit.h',
         './src/media-conduit/AudioConduit.cpp',
         './src/media-conduit/VideoConduit.h',
         './src/media-conduit/VideoConduit.cpp',
+        './src/media-conduit/YuvStamper.h',
+        './src/media-conduit/YuvStamper.cpp',
+
         # Common
         './src/common/CommonTypes.h',
         './src/common/csf_common.h',
         './src/common/NullDeleter.h',
         './src/common/Wrapper.h',
         './src/common/NullTransport.h',
         # Browser Logging
         './src/common/browser_logging/CSFLog.cpp',
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -6,18 +6,21 @@
 #include "nspr.h"
 
 // For rtcp-fb constants
 #include "ccsdp.h"
 
 #include "VideoConduit.h"
 #include "AudioConduit.h"
 #include "nsThreadUtils.h"
-
 #include "LoadManager.h"
+#include "YuvStamper.h"
+#include "nsServiceManagerUtils.h"
+#include "nsIPrefService.h"
+#include "nsIPrefBranch.h"
 
 #include "webrtc/common_video/interface/native_handle.h"
 #include "webrtc/video_engine/include/vie_errors.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "AndroidJNIWrapper.h"
 #endif
 
@@ -728,19 +731,34 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
   mUsingNackBasic = use_nack_basic;
 
   //Start Receive on the video engine
   if(mPtrViEBase->StartReceive(mChannel) == -1)
   {
     error = mPtrViEBase->LastError();
     CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, error);
 
+
     return kMediaConduitUnknownError;
   }
 
+#ifdef MOZILLA_INTERNAL_API
+  if (NS_IsMainThread()) {
+    nsresult rv;
+    nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
+    if (NS_SUCCEEDED(rv)) {
+      nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
+
+      if (branch) {
+	branch->GetBoolPref("media.video.test_latency", &mVideoLatencyTestEnable);
+      }
+    }
+  }
+#endif
+
   // by now we should be successfully started the reception
   mPtrRTP->SetRembStatus(mChannel, false, true);
   mEngineReceiving = true;
   DumpCodecDB();
   return kMediaConduitNoError;
 }
 
 // XXX we need to figure out how to feed back changes in preferred capture
@@ -1045,16 +1063,20 @@ int WebrtcVideoConduit::SendRTCPPacket(i
 // WebRTC::ExternalMedia Implementation
 int
 WebrtcVideoConduit::FrameSizeChange(unsigned int width,
                                     unsigned int height,
                                     unsigned int numStreams)
 {
   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
 
+
+  mReceivingWidth = width;
+  mReceivingHeight = height;
+
   if(mRenderer)
   {
     mRenderer->FrameSizeChange(width, height, numStreams);
     return 0;
   }
 
   CSFLogError(logTag,  "%s Renderer is NULL ", __FUNCTION__);
   return -1;
@@ -1074,16 +1096,28 @@ WebrtcVideoConduit::DeliverFrame(unsigne
     layers::Image* img = nullptr;
     // |handle| should be a webrtc::NativeHandle if available.
     if (handle) {
       webrtc::NativeHandle* native_h = static_cast<webrtc::NativeHandle*>(handle);
       // In the handle, there should be a layers::Image.
       img = static_cast<layers::Image*>(native_h->GetHandle());
     }
 
+    if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
+      uint64_t now = PR_Now();
+      uint64_t timestamp = 0;
+      bool ok = YuvStamper::Decode(mReceivingWidth, mReceivingHeight, mReceivingWidth,
+				   buffer,
+				   reinterpret_cast<uint8_t*>(&timestamp),
+				   sizeof(timestamp), 0, 0);
+      if (ok) {
+	VideoLatencyUpdate(now - timestamp);
+      }
+    }
+
     const ImageHandle img_h(img);
     mRenderer->RenderVideoFrame(buffer, buffer_size, time_stamp, render_time,
                                 img_h);
     return 0;
   }
 
   CSFLogError(logTag,  "%s Renderer is NULL  ", __FUNCTION__);
   return -1;
@@ -1202,9 +1236,21 @@ WebrtcVideoConduit::DumpCodecDB() const
   {
     CSFLogDebug(logTag,"Payload Name: %s", mRecvCodecList[i]->mName.c_str());
     CSFLogDebug(logTag,"Payload Type: %d", mRecvCodecList[i]->mType);
     CSFLogDebug(logTag,"Payload Max Frame Size: %d", mRecvCodecList[i]->mMaxFrameSize);
     CSFLogDebug(logTag,"Payload Max Frame Rate: %d", mRecvCodecList[i]->mMaxFrameRate);
   }
 }
 
+void
+WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample)
+{
+  mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
+}
+
+uint64_t
+WebrtcVideoConduit::MozVideoLatencyAvg()
+{
+  return mVideoLatencyAvg / sRoundingPadding;
+}
+
 }// end namespace
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h
@@ -223,17 +223,21 @@ public:
                       mRenderer(nullptr),
                       mPtrExtCapture(nullptr),
                       mEngineTransmitting(false),
                       mEngineReceiving(false),
                       mChannel(-1),
                       mCapId(-1),
                       mCurSendCodecConfig(nullptr),
                       mSendingWidth(0),
-                      mSendingHeight(0)
+		      mSendingHeight(0),
+		      mReceivingWidth(640),
+		      mReceivingHeight(480),
+		      mVideoLatencyTestEnable(false),
+		      mVideoLatencyAvg(0)
   {
   }
 
   virtual ~WebrtcVideoConduit() ;
 
   MediaConduitErrorCode Init(WebrtcVideoConduit *other);
 
   int GetChannel() { return mChannel; }
@@ -248,16 +252,17 @@ public:
                              uint32_t* jitterMs,
                              uint32_t* packetsReceived,
                              uint64_t* bytesReceived,
                              uint32_t* cumulativeLost,
                              int32_t* rttMs);
   bool GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
                            unsigned int* packetsSent,
                            uint64_t* bytesSent);
+  uint64_t MozVideoLatencyAvg();
 
 private:
 
   WebrtcVideoConduit(const WebrtcVideoConduit& other) MOZ_DELETE;
   void operator=(const WebrtcVideoConduit& other) MOZ_DELETE;
 
   //Local database of currently applied receive codecs
   typedef std::vector<VideoCodecConfig* > RecvCodecList;
@@ -276,16 +281,19 @@ private:
                            const VideoCodecConfig* codecInfo) const;
 
   //Checks the codec to be applied
   MediaConduitErrorCode ValidateCodecConfig(const VideoCodecConfig* codecInfo, bool send) const;
 
   //Utility function to dump recv codec database
   void DumpCodecDB() const;
 
+  // Video Latency Test averaging filter
+  void VideoLatencyUpdate(uint64_t new_sample);
+
   // The two sides of a send/receive pair of conduits each keep a pointer to the other.
   // They also share a single VideoEngine and mChannel.  Shutdown must be coordinated
   // carefully to avoid double-freeing or accessing after one frees.
   WebrtcVideoConduit*  mOtherDirection;
   // The other side has shut down our mChannel and related items already
   bool mShutDown;
 
   // A few of these are shared by both directions.  They're released by the last
@@ -309,15 +317,23 @@ private:
   bool mEngineReceiving;    // if true ==> Receive Sus-sysmtem up and running
 
   int mChannel; // Video Channel for this conduit
   int mCapId;   // Capturer for this conduit
   RecvCodecList    mRecvCodecList;
   VideoCodecConfig* mCurSendCodecConfig;
   unsigned short mSendingWidth;
   unsigned short mSendingHeight;
+  unsigned short mReceivingWidth;
+  unsigned short mReceivingHeight;
+  bool mVideoLatencyTestEnable;
+  uint64_t mVideoLatencyAvg;
+
+  static const unsigned int sAlphaNum = 7;
+  static const unsigned int sAlphaDen = 8;
+  static const unsigned int sRoundingPadding = 1024;
 
   mozilla::RefPtr<WebrtcAudioConduit> mSyncedTo;
 };
 
 } // end namespace
 
 #endif
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/media-conduit/YuvStamper.cpp
@@ -0,0 +1,190 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifdef HAVE_NETINET_IN_H
+#include <netinet/in.h>
+#elif defined XP_WIN
+#include <winsock2.h>
+#endif
+
+#include "YuvStamper.h"
+
+typedef uint32_t UINT4; //Needed for r_crc32() call
+extern "C" {
+#include "r_crc32.h"
+}
+
+namespace mozilla {
+
+  YuvStamper::YuvStamper(uint8_t* pYData,
+			 uint32_t width,
+			 uint32_t height,
+			 uint32_t stride,
+			 uint32_t x,
+			 uint32_t y):
+    pYData(pYData), mStride(stride),
+    mWidth(width), mHeight(height),
+    mCursor(x, y) {}
+
+  bool YuvStamper::Encode(uint32_t width, uint32_t height, uint32_t stride,
+			  uint8_t* pYData, uint8_t* pMsg, size_t msg_len,
+			  uint32_t x, uint32_t y)
+  {
+    YuvStamper stamper(pYData, width, height, stride, x, y);
+
+    // Reserve space for a checksum.
+    if (stamper.Capacity() < 8 * (msg_len + sizeof(uint32_t)))
+    {
+      return false;
+    }
+
+    bool ok = false;
+    uint32_t crc;
+    uint8_t* pCrc = reinterpret_cast<uint8_t*>(&crc);
+    r_crc32(reinterpret_cast<char*>(pMsg), (int)msg_len, &crc);
+    crc = htonl(crc);
+
+    while (msg_len-- > 0) {
+      if (!stamper.Write8(*pMsg++)) {
+	return false;
+      }
+    }
+
+    // Add checksum after the message.
+    ok = stamper.Write8(*pCrc++) &&
+         stamper.Write8(*pCrc++) &&
+         stamper.Write8(*pCrc++) &&
+         stamper.Write8(*pCrc++);
+
+    return ok;
+  }
+
+  bool YuvStamper::Decode(uint32_t width, uint32_t height, uint32_t stride,
+			  uint8_t* pYData, uint8_t* pMsg, size_t msg_len,
+			  uint32_t x, uint32_t y)
+  {
+    YuvStamper stamper(pYData, width, height, stride, x, y);
+    uint8_t* ptr = pMsg;
+    size_t len = msg_len;
+    uint32_t crc, msg_crc;
+    uint8_t* pCrc = reinterpret_cast<uint8_t*>(&crc);
+
+    // Account for space reserved for the checksum
+    if (stamper.Capacity() < 8 * (len + sizeof(uint32_t))) {
+      return false;
+    }
+
+    while (len-- > 0) {
+      if(!stamper.Read8(*ptr++)) {
+	return false;
+      }
+    }
+
+    if (!(stamper.Read8(*pCrc++) &&
+          stamper.Read8(*pCrc++) &&
+          stamper.Read8(*pCrc++) &&
+          stamper.Read8(*pCrc++))) {
+      return false;
+    }
+
+    r_crc32(reinterpret_cast<char*>(pMsg), (int)msg_len, &msg_crc);
+    return crc == htonl(msg_crc);
+  }
+
+  inline uint32_t YuvStamper::Capacity()
+  {
+    // Enforce at least a symbol width and height offset from outer edges.
+    if (mCursor.y + sBitSize > mHeight) {
+      return 0;
+    }
+
+    if (mCursor.x + sBitSize > mWidth && !AdvanceCursor()) {
+      return 0;
+    }
+
+    // Normalize frame integral to sBitSize x sBitSize
+    uint32_t width = mWidth / sBitSize;
+    uint32_t height = mHeight / sBitSize;
+    uint32_t x = mCursor.x / sBitSize;
+    uint32_t y = mCursor.y / sBitSize;
+
+    return (width * height - width * y)- x;
+  }
+
+  bool YuvStamper::Write8(uint8_t value)
+  {
+    // Encode MSB to LSB.
+    uint8_t mask = 0x80;
+    while (mask) {
+      if (!WriteBit(!!(value & mask))) {
+	return false;
+      }
+      mask >>= 1;
+    }
+    return true;
+  }
+
+  bool YuvStamper::WriteBit(bool one)
+  {
+    // A bit is mapped to a sBitSize x sBitSize square of luma data points.
+    uint8_t value = one ? sYOn : sYOff;
+    for (uint32_t y = 0; y < sBitSize; y++) {
+      for (uint32_t x = 0; x < sBitSize; x++) {
+	*(pYData + (mCursor.x + x) + ((mCursor.y + y) * mStride)) = value;
+      }
+    }
+
+    return AdvanceCursor();
+  }
+
+  bool YuvStamper::AdvanceCursor()
+  {
+    mCursor.x += sBitSize;
+    if (mCursor.x + sBitSize > mWidth) {
+      // move to the start of the next row if possible.
+      mCursor.y += sBitSize;
+      if (mCursor.y + sBitSize > mHeight) {
+	// end of frame, do not advance
+	mCursor.y -= sBitSize;
+	mCursor.x -= sBitSize;
+	return false;
+      } else {
+	mCursor.x = 0;
+      }
+    }
+
+    return true;
+  }
+
+  bool YuvStamper::Read8(uint8_t &value) {
+    uint8_t octet = 0;
+    uint8_t bit = 0;
+
+    for (int i = 8; i > 0; --i) {
+      if (!ReadBit(bit)) {
+	return false;
+      }
+      octet <<= 1;
+      octet |= bit;
+    }
+
+    value = octet;
+    return true;
+  }
+
+  bool YuvStamper::ReadBit(uint8_t &bit)
+  {
+    uint32_t sum = 0;
+    for (uint32_t y = 0; y < sBitSize; y++) {
+      for (uint32_t x = 0; x < sBitSize; x++) {
+	sum += *(pYData + mStride * (mCursor.y + y) + mCursor.x + x);
+      }
+    }
+
+    // apply threshold to collected bit square
+    bit = (sum > (sBitThreshold * sBitSize * sBitSize)) ? 1 : 0;
+    return AdvanceCursor();
+  }
+
+}  // Namespace mozilla.
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/media-conduit/YuvStamper.h
@@ -0,0 +1,47 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+namespace mozilla {
+
+class
+YuvStamper {
+public:
+  YuvStamper(uint8_t* pYData,
+	     uint32_t width, uint32_t height, uint32_t stride,
+	     uint32_t x = 0, uint32_t y = 0);
+  static bool Encode(uint32_t width, uint32_t height, uint32_t stride,
+		     uint8_t* pYData, uint8_t* pMsg, size_t msg_len,
+		     uint32_t x, uint32_t y);
+
+  static bool Decode(uint32_t width, uint32_t height, uint32_t stride,
+		     uint8_t* pYData, uint8_t* pMsg, size_t msg_len,
+		     uint32_t x, uint32_t y);
+
+ private:
+  uint32_t Capacity();
+  bool AdvanceCursor();
+  bool WriteBit(bool one);
+  bool Write8(uint8_t value);
+  bool ReadBit(uint8_t &value);
+  bool Read8(uint8_t &bit);
+
+  const static uint32_t sBitSize = 4;
+  const static uint32_t sBitThreshold = 60;
+  const static uint8_t sYOn = 0x80;
+  const static uint8_t sYOff = 0;
+
+  uint8_t* pYData;
+  uint32_t mStride;
+  uint32_t mWidth;
+  uint32_t mHeight;
+
+  struct Cursor {
+    Cursor(uint32_t x, uint32_t y):
+      x(x), y(y) {}
+    uint32_t x;
+    uint32_t y;
+  } mCursor;
+};
+
+}