Bug 691234: Part 2/3: Implement WebRTC backend for MediaEngine on Desktop; r=jesup, r=roc
☠☠ backed out by d28330b09a8a ☠☠
author Anant Narayanan <anant@kix.in>
Wed, 11 Jul 2012 21:22:24 -0700
changeset 103831 038e8952133097d8ccfdcbf5d8a62b493479ffaa
parent 103830 431ab4d097c9796b81462fce6295d82b655f0cd4
child 103832 b53e8a614a4cd3a8747e0869fcb49ce3dcd3f964
push id 1316
push user akeybl@mozilla.com
push date Mon, 27 Aug 2012 22:37:00 +0000
reviewers jesup, roc
bugs 691234
milestone 16.0a1
content/media/webrtc/Makefile.in
content/media/webrtc/MediaEngineWebRTC.cpp
content/media/webrtc/MediaEngineWebRTC.h
content/media/webrtc/MediaEngineWebRTCAudio.cpp
content/media/webrtc/MediaEngineWebRTCVideo.cpp
--- a/content/media/webrtc/Makefile.in
+++ b/content/media/webrtc/Makefile.in
@@ -11,20 +11,24 @@ include $(DEPTH)/config/autoconf.mk
 
 MODULE = content
 LIBRARY_NAME = gkconwebrtc_s
 LIBXUL_LIBRARY = 1
 
 EXPORTS	+= \
   MediaEngine.h \
   MediaEngineDefault.h \
+  MediaEngineWebRTC.h \
   $(NULL)
 
 CPPSRCS	= \
   MediaEngineDefault.cpp \
+  MediaEngineWebRTC.cpp \
+  MediaEngineWebRTCVideo.cpp \
+  MediaEngineWebRTCAudio.cpp \
   $(NULL)
 
 FORCE_STATIC_LIB = 1
 
 include $(topsrcdir)/config/rules.mk
 
 LOCAL_INCLUDES += \
   -I$(topsrcdir)/media/webrtc/trunk/src \
new file mode 100644
--- /dev/null
+++ b/content/media/webrtc/MediaEngineWebRTC.cpp
@@ -0,0 +1,121 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaEngineWebRTC.h"
+
+namespace mozilla {
+
+void
+MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >* aVSources)
+{
+  webrtc::ViEBase* ptrViEBase;
+  webrtc::ViECapture* ptrViECapture;
+
+  if (!mVideoEngine) {
+    if (!(mVideoEngine = webrtc::VideoEngine::Create())) {
+      return;
+    }
+  }
+
+  ptrViEBase = webrtc::ViEBase::GetInterface(mVideoEngine);
+  if (!ptrViEBase) {
+    return;
+  }
+
+  if (!mVideoEngineInit) {
+    if (ptrViEBase->Init() < 0) {
+      return;
+    }
+    mVideoEngineInit = true;
+  }
+
+  ptrViECapture = webrtc::ViECapture::GetInterface(mVideoEngine);
+  if (!ptrViECapture) {
+    return;
+  }
+
+  int num = ptrViECapture->NumberOfCaptureDevices();
+  if (num <= 0) {
+    return;
+  }
+
+  for (int i = 0; i < num; i++) {
+    nsRefPtr<MediaEngineVideoSource> vSource = new MediaEngineWebRTCVideoSource(mVideoEngine, i);
+    aVSources->AppendElement(vSource.forget());
+  }
+
+  ptrViEBase->Release();
+  ptrViECapture->Release();
+
+  return;
+}
+
+void
+MediaEngineWebRTC::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources)
+{
+  webrtc::VoEBase* ptrVoEBase = NULL;
+  webrtc::VoEHardware* ptrVoEHw = NULL;
+
+  if (!mVoiceEngine) {
+    mVoiceEngine = webrtc::VoiceEngine::Create();
+    if (!mVoiceEngine) {
+      return;
+    }
+  }
+
+  ptrVoEBase = webrtc::VoEBase::GetInterface(mVoiceEngine);
+  if (!ptrVoEBase) {
+    return;
+  }
+
+  if (!mAudioEngineInit) {
+    if (ptrVoEBase->Init() < 0) {
+      return;
+    }
+    mAudioEngineInit = true;
+  }
+
+  ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
+  if (!ptrVoEHw)  {
+    return;
+  }
+
+  int nDevices = 0;
+  ptrVoEHw->GetNumOfRecordingDevices(nDevices);
+  for (int i = 0; i < nDevices; i++) {
+    // We use fixed 128-byte buffers because GetRecordingDeviceName takes char[128].
+    char deviceName[128];
+    memset(deviceName, 0, sizeof(deviceName));
+
+    char uniqueID[128];
+    memset(uniqueID, 0, sizeof(uniqueID));
+
+    ptrVoEHw->GetRecordingDeviceName(i, deviceName, uniqueID);
+    nsRefPtr<MediaEngineAudioSource> aSource = new MediaEngineWebRTCAudioSource(
+      mVoiceEngine, i, deviceName, uniqueID
+    );
+    aASources->AppendElement(aSource.forget());
+  }
+
+  ptrVoEHw->Release();
+  ptrVoEBase->Release();
+}
+
+
+void
+MediaEngineWebRTC::Shutdown()
+{
+  if (mVideoEngine) {
+    webrtc::VideoEngine::Delete(mVideoEngine);
+  }
+
+  if (mVoiceEngine) {
+    webrtc::VoiceEngine::Delete(mVoiceEngine);
+  }
+
+  mVideoEngine = NULL;
+  mVoiceEngine = NULL;
+}
+
+}
new file mode 100644
--- /dev/null
+++ b/content/media/webrtc/MediaEngineWebRTC.h
@@ -0,0 +1,242 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MEDIAENGINEWEBRTC_H_
+#define MEDIAENGINEWEBRTC_H_
+
+#include "prmem.h"
+#include "prcvar.h"
+#include "prthread.h"
+#include "nsIThread.h"
+#include "nsIRunnable.h"
+
+#include "nsCOMPtr.h"
+#include "nsDOMFile.h"
+#include "nsThreadUtils.h"
+#include "nsDOMMediaStream.h"
+#include "nsDirectoryServiceDefs.h"
+#include "nsComponentManagerUtils.h"
+
+#include "Layers.h"
+#include "VideoUtils.h"
+#include "MediaEngine.h"
+#include "ImageLayers.h"
+#include "VideoSegment.h"
+#include "AudioSegment.h"
+#include "StreamBuffer.h"
+#include "MediaStreamGraph.h"
+
+// WebRTC library includes follow
+
+// Audio Engine
+#include "voice_engine/main/interface/voe_base.h"
+#include "voice_engine/main/interface/voe_codec.h"
+#include "voice_engine/main/interface/voe_hardware.h"
+#include "voice_engine/main/interface/voe_audio_processing.h"
+#include "voice_engine/main/interface/voe_volume_control.h"
+#include "voice_engine/main/interface/voe_external_media.h"
+
+// Video Engine
+#include "video_engine/include/vie_base.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/include/vie_file.h"
+
+
+namespace mozilla {
+
+/**
+ * The WebRTC implementation of the MediaEngine interface.
+ */
+
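+// Simple source state machine shared by the audio and video sources:
+// kReleased -> Allocate() -> kAllocated -> Start() -> kStarted -> Stop() -> kStopped,
+// and Deallocate() returns an allocated or stopped source to kReleased.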
+enum WebRTCEngineState {
+  kAllocated,
+  kStarted,
+  kStopped,
+  kReleased
+};
+
+class MediaEngineWebRTCVideoSource : public MediaEngineVideoSource,
+                                     public webrtc::ExternalRenderer,
+                                     public nsRunnable
+{
+public:
+  // ViEExternalRenderer.
+  virtual int FrameSizeChange(unsigned int, unsigned int, unsigned int);
+  virtual int DeliverFrame(unsigned char*, int, uint32_t, int64_t);
+
+  MediaEngineWebRTCVideoSource(webrtc::VideoEngine* videoEnginePtr,
+    int index, int aFps = 30)
+    : mVideoEngine(videoEnginePtr)
+    , mCapIndex(index)
+    , mWidth(640)
+    , mHeight(480)
+    , mState(kReleased)
+    , mMonitor("WebRTCCamera.Monitor")
+    , mFps(aFps)
+    , mInitDone(false)
+    , mInSnapshotMode(false)
+    , mSnapshotPath(NULL) { Init(); }
+
+  ~MediaEngineWebRTCVideoSource() { Shutdown(); }
+
+  virtual void GetName(nsAString&);
+  virtual void GetUUID(nsAString&);
+  virtual MediaEngineVideoOptions GetOptions();
+  virtual nsresult Allocate();
+  virtual nsresult Deallocate();
+  virtual nsresult Start(SourceMediaStream*, TrackID);
+  virtual nsresult Stop();
+  virtual nsresult Snapshot(PRUint32 aDuration, nsIDOMFile** aFile);
+
+  NS_DECL_ISUPPORTS
+
+  // This runnable is for creating a temporary file on the main thread.
+  NS_IMETHODIMP
+  Run()
+  {
+    nsCOMPtr<nsIFile> tmp;
+    nsresult rv = NS_GetSpecialDirectory(NS_OS_TEMP_DIR, getter_AddRefs(tmp));
+    NS_ENSURE_SUCCESS(rv, rv);
+
+    tmp->Append(NS_LITERAL_STRING("webrtc_snapshot.jpeg"));
+    rv = tmp->CreateUnique(nsIFile::NORMAL_FILE_TYPE, 0600);
+    NS_ENSURE_SUCCESS(rv, rv);
+
+    mSnapshotPath = new nsString();
+    rv = tmp->GetPath(*mSnapshotPath);
+    NS_ENSURE_SUCCESS(rv, rv);
+
+    return NS_OK;
+  }
+
+private:
+  static const unsigned int KMaxDeviceNameLength;
+  static const unsigned int KMaxUniqueIdLength;
+
+  // Initialize the needed Video engine interfaces.
+  void Init();
+  void Shutdown();
+
+  // Engine variables.
+
+  webrtc::VideoEngine* mVideoEngine; // Weak reference, don't free.
+  webrtc::ViEBase* mViEBase;
+  webrtc::ViECapture* mViECapture;
+  webrtc::ViERender* mViERender;
+  webrtc::CaptureCapability mCaps; // Doesn't work on OS X.
+
+  int mCapIndex;
+  int mWidth, mHeight;
+  TrackID mTrackID;
+
+  WebRTCEngineState mState;
+  mozilla::ReentrantMonitor mMonitor; // Monitor for processing WebRTC frames.
+  SourceMediaStream* mSource;
+
+  int mFps; // Track rate (30 fps by default)
+  bool mInitDone;
+  bool mInSnapshotMode;
+  nsString* mSnapshotPath;
+
+  nsRefPtr<layers::ImageContainer> mImageContainer;
+
+  PRLock* mSnapshotLock;
+  PRCondVar* mSnapshotCondVar;
+
+};
+
+class MediaEngineWebRTCAudioSource : public MediaEngineAudioSource,
+                                     public webrtc::VoEMediaProcess
+{
+public:
+  MediaEngineWebRTCAudioSource(webrtc::VoiceEngine* voiceEngine, int aIndex,
+    char* name, char* uuid)
+    : mVoiceEngine(voiceEngine)
+    , mMonitor("WebRTCMic.Monitor")
+    , mCapIndex(aIndex)
+    , mChannel(-1)
+    , mInitDone(false)
+    , mState(kReleased) {
+
+    mVoEBase = webrtc::VoEBase::GetInterface(mVoiceEngine);
+    mDeviceName.Assign(NS_ConvertASCIItoUTF16(name));
+    mDeviceUUID.Assign(NS_ConvertASCIItoUTF16(uuid));
+    mInitDone = true;
+  }
+
+  ~MediaEngineWebRTCAudioSource() { Shutdown(); }
+
+  virtual void GetName(nsAString&);
+  virtual void GetUUID(nsAString&);
+
+  virtual nsresult Allocate();
+  virtual nsresult Deallocate();
+  virtual nsresult Start(SourceMediaStream*, TrackID);
+  virtual nsresult Stop();
+  virtual nsresult Snapshot(PRUint32 aDuration, nsIDOMFile** aFile);
+
+  // VoEMediaProcess.
+  void Process(const int channel, const webrtc::ProcessingTypes type,
+               WebRtc_Word16 audio10ms[], const int length,
+               const int samplingFreq, const bool isStereo);
+
+  NS_DECL_ISUPPORTS
+
+private:
+  static const unsigned int KMaxDeviceNameLength;
+  static const unsigned int KMaxUniqueIdLength;
+
+  void Init();
+  void Shutdown();
+
+  webrtc::VoiceEngine* mVoiceEngine;
+  webrtc::VoEBase* mVoEBase;
+  webrtc::VoEExternalMedia* mVoERender;
+
+  mozilla::ReentrantMonitor mMonitor;
+
+  int mCapIndex;
+  int mChannel;
+  TrackID mTrackID;
+  bool mInitDone;
+  WebRTCEngineState mState;
+
+  nsString mDeviceName;
+  nsString mDeviceUUID;
+
+  SourceMediaStream* mSource;
+};
+
+class MediaEngineWebRTC : public MediaEngine
+{
+public:
+  MediaEngineWebRTC()
+  : mVideoEngine(NULL)
+  , mVoiceEngine(NULL)
+  , mVideoEngineInit(false)
+  , mAudioEngineInit(false) {}
+
+  ~MediaEngineWebRTC() { Shutdown(); }
+
+  // Clients should ensure that all video/audio sources are cleaned up
+  // before invoking Shutdown() on this class.
+  void Shutdown();
+
+  virtual void EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSource> >*);
+  virtual void EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >*);
+
+private:
+  webrtc::VideoEngine* mVideoEngine;
+  webrtc::VoiceEngine* mVoiceEngine;
+
+  // Needed to avoid unnecessary WebRTC calls while enumerating.
+  bool mVideoEngineInit;
+  bool mAudioEngineInit;
+};
+
+}
+
+#endif /* MEDIAENGINEWEBRTC_H_ */
new file mode 100644
--- /dev/null
+++ b/content/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -0,0 +1,224 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaEngineWebRTC.h"
+
+#define CHANNELS 1
+#define ENCODING "L16"
+#define DEFAULT_PORT 5555
+
+#define SAMPLE_RATE 256000
+#define SAMPLE_FREQUENCY 16000
+#define SAMPLE_LENGTH ((SAMPLE_FREQUENCY*10)/1000)
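+// SAMPLE_LENGTH works out to (16000 * 10) / 1000 = 160 samples, i.e. one 10 ms chunk.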
+
+namespace mozilla {
+
+/**
+ * Webrtc audio source.
+ */
+NS_IMPL_THREADSAFE_ISUPPORTS0(MediaEngineWebRTCAudioSource)
+
+void
+MediaEngineWebRTCAudioSource::GetName(nsAString& aName)
+{
+  if (mInitDone) {
+    aName.Assign(mDeviceName);
+  }
+
+  return;
+}
+
+void
+MediaEngineWebRTCAudioSource::GetUUID(nsAString& aUUID)
+{
+  if (mInitDone) {
+    aUUID.Assign(mDeviceUUID);
+  }
+
+  return;
+}
+
+nsresult
+MediaEngineWebRTCAudioSource::Allocate()
+{
+  if (mState != kReleased) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mVoEBase->Init();
+
+  mVoERender = webrtc::VoEExternalMedia::GetInterface(mVoiceEngine);
+  if (!mVoERender) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mChannel = mVoEBase->CreateChannel();
+  if (mChannel < 0) {
+    return NS_ERROR_FAILURE;
+  }
+
+  // Check for availability.
+  webrtc::VoEHardware* ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
+  if (ptrVoEHw->SetRecordingDevice(mCapIndex)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  bool avail = false;
+  ptrVoEHw->GetRecordingDeviceStatus(avail);
+  if (!avail) {
+    return NS_ERROR_FAILURE;
+  }
+
+  // Set "codec" to PCM, 32kHz on 1 channel
+  webrtc::VoECodec* ptrVoECodec;
+  webrtc::CodecInst codec;
+  ptrVoECodec = webrtc::VoECodec::GetInterface(mVoiceEngine);
+  if (!ptrVoECodec) {
+    return NS_ERROR_FAILURE;
+  }
+
+  strcpy(codec.plname, ENCODING);
+  codec.channels = CHANNELS;
+  codec.rate = SAMPLE_RATE;
+  codec.plfreq = SAMPLE_FREQUENCY;
+  codec.pacsize = SAMPLE_LENGTH;
+  codec.pltype = 0; // Default payload type
+
+  if (ptrVoECodec->SetSendCodec(mChannel, codec)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  // Audio doesn't play through unless we set a receiver and destination, so
+  // we set up a dummy local destination and do a loopback.
+  mVoEBase->SetLocalReceiver(mChannel, DEFAULT_PORT);
+  mVoEBase->SetSendDestination(mChannel, DEFAULT_PORT, "127.0.0.1");
+
+  mState = kAllocated;
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCAudioSource::Deallocate()
+{
+  if (mState != kStopped && mState != kAllocated) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mVoEBase->Terminate();
+  mVoERender->Release();
+
+  mState = kReleased;
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
+{
+  if (!mInitDone || mState != kAllocated) {
+    return NS_ERROR_FAILURE;
+  }
+  if (!aStream) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mSource = aStream;
+
+  AudioSegment* segment = new AudioSegment();
+  segment->Init(CHANNELS);
+  mSource->AddTrack(aID, SAMPLE_FREQUENCY, 0, segment);
+  mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);
+  mTrackID = aID;
+
+  if (mVoEBase->StartReceive(mChannel)) {
+    return NS_ERROR_FAILURE;
+  }
+  if (mVoEBase->StartSend(mChannel)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  // Attach external media processor, so this::Process will be called.
+  mVoERender->RegisterExternalMediaProcessing(mChannel, webrtc::kRecordingPerChannel, *this);
+
+  mState = kStarted;
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCAudioSource::Stop()
+{
+  if (mState != kStarted) {
+    return NS_ERROR_FAILURE;
+  }
+  if (!mVoEBase) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mVoERender->DeRegisterExternalMediaProcessing(mChannel, webrtc::kRecordingPerChannel);
+
+  if (mVoEBase->StopSend(mChannel)) {
+    return NS_ERROR_FAILURE;
+  }
+  if (mVoEBase->StopReceive(mChannel)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mState = kStopped;
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCAudioSource::Snapshot(PRUint32 aDuration, nsIDOMFile** aFile)
+{
+  return NS_ERROR_NOT_IMPLEMENTED;
+}
+
+
+void
+MediaEngineWebRTCAudioSource::Shutdown()
+{
+  if (!mInitDone) {
+    return;
+  }
+
+  if (mState == kStarted) {
+    Stop();
+  }
+
+  if (mState == kAllocated) {
+    Deallocate();
+  }
+
+  mVoEBase->Release();
+
+  mState = kReleased;
+  mInitDone = false;
+}
+
+typedef WebRtc_Word16 sample;
+
+void
+MediaEngineWebRTCAudioSource::Process(const int channel,
+  const webrtc::ProcessingTypes type, sample* audio10ms,
+  const int length, const int samplingFreq, const bool isStereo)
+{
+  ReentrantMonitorAutoEnter enter(mMonitor);
+
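+  // Copy the 10 ms chunk delivered by VoE into a SharedBuffer and append it
+  // to our SourceMediaStream track as 16-bit samples.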
+  nsRefPtr<SharedBuffer> buffer = SharedBuffer::Create(length * sizeof(sample));
+
+  sample* dest = static_cast<sample*>(buffer->Data());
+  for (int i = 0; i < length; i++) {
+    dest[i] = audio10ms[i];
+  }
+
+  AudioSegment segment;
+  segment.Init(CHANNELS);
+  segment.AppendFrames(
+    buffer.forget(), length, 0, length, nsAudioStream::FORMAT_S16_LE
+  );
+  mSource->AppendToTrack(mTrackID, &segment);
+
+  return;
+}
+
+}
new file mode 100644
--- /dev/null
+++ b/content/media/webrtc/MediaEngineWebRTCVideo.cpp
@@ -0,0 +1,370 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaEngineWebRTC.h"
+
+namespace mozilla {
+
+/**
+ * Webrtc video source.
+ */
+NS_IMPL_THREADSAFE_ISUPPORTS1(MediaEngineWebRTCVideoSource, nsIRunnable)
+
+// Maximum buffer lengths used when querying device names and unique IDs.
+const unsigned int MediaEngineWebRTCVideoSource::KMaxDeviceNameLength = 128;
+const unsigned int MediaEngineWebRTCVideoSource::KMaxUniqueIdLength = 256;
+
+// ViEExternalRenderer Callback.
+int
+MediaEngineWebRTCVideoSource::FrameSizeChange(
+   unsigned int w, unsigned int h, unsigned int streams)
+{
+  mWidth = w;
+  mHeight = h;
+  return 0;
+}
+
+// ViEExternalRenderer Callback. Process every incoming frame here.
+int
+MediaEngineWebRTCVideoSource::DeliverFrame(
+   unsigned char* buffer, int size, uint32_t time_stamp, int64_t render_time)
+{
+  ReentrantMonitorAutoEnter enter(mMonitor);
+
+  if (mInSnapshotMode) {
+    // Reset the snapshot flag and notify Snapshot().
+    PR_Lock(mSnapshotLock);
+    mInSnapshotMode = false;
+    PR_NotifyCondVar(mSnapshotCondVar);
+    PR_Unlock(mSnapshotLock);
+    return 0;
+  }
+
+  // Check for proper state.
+  if (mState != kStarted) {
+    return 0;
+  }
+
+  // Create a video frame and append it to the track.
+  layers::Image::Format format = layers::Image::PLANAR_YCBCR;
+  nsRefPtr<layers::Image> image = mImageContainer->CreateImage(&format, 1);
+
+  layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
+
+  PRUint8* frame = static_cast<PRUint8*> (buffer);
+  const PRUint8 lumaBpp = 8;
+  const PRUint8 chromaBpp = 4;
+
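+  // The delivered buffer is an I420 frame: a full-resolution Y plane followed by
+  // 2x2-subsampled Cb and Cr planes (8 bits/pixel luma, 4 bits/pixel chroma on average).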
+  layers::PlanarYCbCrImage::Data data;
+  data.mYChannel = frame;
+  data.mYSize = gfxIntSize(mWidth, mHeight);
+  data.mYStride = mWidth * lumaBpp / 8;
+  data.mCbCrStride = mWidth * chromaBpp / 8;
+  data.mCbChannel = frame + mHeight * data.mYStride;
+  data.mCrChannel = data.mCbChannel + mHeight * data.mCbCrStride / 2;
+  data.mCbCrSize = gfxIntSize(mWidth / 2, mHeight / 2);
+  data.mPicX = 0;
+  data.mPicY = 0;
+  data.mPicSize = gfxIntSize(mWidth, mHeight);
+  data.mStereoMode = layers::STEREO_MODE_MONO;
+
+  videoImage->SetData(data);
+
+  VideoSegment segment;
+  segment.AppendFrame(image.forget(), 1, gfxIntSize(mWidth, mHeight));
+  mSource->AppendToTrack(mTrackID, &(segment));
+  return 0;
+}
+
+void
+MediaEngineWebRTCVideoSource::GetName(nsAString& aName)
+{
+  char deviceName[KMaxDeviceNameLength];
+  memset(deviceName, 0, KMaxDeviceNameLength);
+
+  char uniqueId[KMaxUniqueIdLength];
+  memset(uniqueId, 0, KMaxUniqueIdLength);
+
+  if (mInitDone) {
+    mViECapture->GetCaptureDevice(
+      mCapIndex, deviceName, KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength
+    );
+    aName.Assign(NS_ConvertASCIItoUTF16(deviceName));
+  }
+}
+
+void
+MediaEngineWebRTCVideoSource::GetUUID(nsAString& aUUID)
+{
+  char deviceName[KMaxDeviceNameLength];
+  memset(deviceName, 0, KMaxDeviceNameLength);
+
+  char uniqueId[KMaxUniqueIdLength];
+  memset(uniqueId, 0, KMaxUniqueIdLength);
+
+  if (mInitDone) {
+    mViECapture->GetCaptureDevice(
+      mCapIndex, deviceName, KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength
+    );
+    aUUID.Assign(NS_ConvertASCIItoUTF16(uniqueId));
+  }
+}
+
+nsresult
+MediaEngineWebRTCVideoSource::Allocate()
+{
+  if (mState != kReleased) {
+    return NS_ERROR_FAILURE;
+  }
+
+  char deviceName[KMaxDeviceNameLength];
+  memset(deviceName, 0, KMaxDeviceNameLength);
+
+  char uniqueId[KMaxUniqueIdLength];
+  memset(uniqueId, 0, KMaxUniqueIdLength);
+
+  mViECapture->GetCaptureDevice(
+    mCapIndex, deviceName, KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength
+  );
+
+  if (mViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength, mCapIndex)) {
+    return NS_ERROR_FAILURE;
+  }
+
+  if (mViECapture->StartCapture(mCapIndex) < 0) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mState = kAllocated;
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCVideoSource::Deallocate()
+{
+  if (mState != kStopped && mState != kAllocated) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mViECapture->StopCapture(mCapIndex);
+  mViECapture->ReleaseCaptureDevice(mCapIndex);
+  mState = kReleased;
+  return NS_OK;
+}
+
+MediaEngineVideoOptions
+MediaEngineWebRTCVideoSource::GetOptions()
+{
+  MediaEngineVideoOptions aOpts;
+  aOpts.mWidth = mWidth;
+  aOpts.mHeight = mHeight;
+  aOpts.mMaxFPS = mFps;
+  aOpts.codecType = kVideoCodecI420;
+  return aOpts;
+}
+
+nsresult
+MediaEngineWebRTCVideoSource::Start(SourceMediaStream* aStream, TrackID aID)
+{
+  int error = 0;
+  if (!mInitDone || mState != kAllocated) {
+    return NS_ERROR_FAILURE;
+  }
+
+  if (!aStream) {
+    return NS_ERROR_FAILURE;
+  }
+
+  if (mState == kStarted) {
+    return NS_OK;
+  }
+
+  mSource = aStream;
+  mTrackID = aID;
+
+  mImageContainer = layers::LayerManager::CreateImageContainer();
+  mSource->AddTrack(aID, mFps, 0, new VideoSegment());
+  mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);
+
+  error = mViERender->AddRenderer(mCapIndex, webrtc::kVideoI420, (webrtc::ExternalRenderer*)this);
+  if (error == -1) {
+    return NS_ERROR_FAILURE;
+  }
+
+  error = mViERender->StartRender(mCapIndex);
+  if (error == -1) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mState = kStarted;
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCVideoSource::Stop()
+{
+  if (mState != kStarted) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mSource->EndTrack(mTrackID);
+  mSource->Finish();
+
+  mViERender->StopRender(mCapIndex);
+  mViERender->RemoveRenderer(mCapIndex);
+
+  mState = kStopped;
+  return NS_OK;
+}
+
+nsresult
+MediaEngineWebRTCVideoSource::Snapshot(PRUint32 aDuration, nsIDOMFile** aFile)
+{
+  /**
+   * To get a Snapshot we do the following:
+   * - Set a boolean flag (mInSnapshotMode) to true
+   * - Attach the external renderer and start the camera
+   * - Wait for the flag to be reset to false (signalled on mSnapshotCondVar)
+   *
+   * Starting the camera has the effect of invoking DeliverFrame() when
+   * the first frame arrives from the camera. We only need one frame for
+   * GetCaptureDeviceSnapshot to work, so we immediately reset the flag and
+   * notify this method.
+   *
+   * This causes the current thread to continue (PR_WaitCondVar will return),
+   * at which point we can grab a snapshot, convert it to a file and
+   * return from this function after cleaning up the temporary stream object
+   * and calling Stop() on the media source.
+   */
+  *aFile = nsnull;
+  if (!mInitDone || mState != kAllocated) {
+    return NS_ERROR_FAILURE;
+  }
+
+  mSnapshotLock = PR_NewLock();
+  mSnapshotCondVar = PR_NewCondVar(mSnapshotLock);
+
+  PR_Lock(mSnapshotLock);
+  mInSnapshotMode = true;
+
+  // Start the rendering (equivalent to calling Start(), but without a track).
+  int error = 0;
+  if (!mInitDone || mState != kAllocated) {
+    return NS_ERROR_FAILURE;
+  }
+  error = mViERender->AddRenderer(mCapIndex, webrtc::kVideoI420, (webrtc::ExternalRenderer*)this);
+  if (error == -1) {
+    return NS_ERROR_FAILURE;
+  }
+  error = mViERender->StartRender(mCapIndex);
+  if (error == -1) {
+    return NS_ERROR_FAILURE;
+  }
+
+  // Wait for the condition variable, will be set in DeliverFrame.
+  // We use a while loop because PR_WaitCondVar can return spuriously; a wakeup
+  // does not guarantee that the flag actually changed.
+  while (mInSnapshotMode) {
+    PR_WaitCondVar(mSnapshotCondVar, PR_INTERVAL_NO_TIMEOUT);
+  }
+
+  // If we get here, DeliverFrame received at least one frame.
+  PR_Unlock(mSnapshotLock);
+  PR_DestroyCondVar(mSnapshotCondVar);
+  PR_DestroyLock(mSnapshotLock);
+
+  webrtc::ViEFile* vieFile = webrtc::ViEFile::GetInterface(mVideoEngine);
+  if (!vieFile) {
+    return NS_ERROR_FAILURE;
+  }
+
+  // Create a temporary file on the main thread and put the snapshot in it.
+  // See Run() in MediaEngineWebRTC.h (sets mSnapshotPath).
+  NS_DispatchToMainThread(this, NS_DISPATCH_SYNC);
+
+  if (!mSnapshotPath) {
+    return NS_ERROR_FAILURE;
+  }
+
+  // Keep the UTF-8 conversion alive for the duration of the snapshot call.
+  NS_ConvertUTF16toUTF8 path(*mSnapshotPath);
+  if (vieFile->GetCaptureDeviceSnapshot(mCapIndex, path.get()) < 0) {
+    delete mSnapshotPath;
+    mSnapshotPath = NULL;
+    return NS_ERROR_FAILURE;
+  }
+
+  // Stop the camera.
+  mViERender->StopRender(mCapIndex);
+  mViERender->RemoveRenderer(mCapIndex);
+
+  nsCOMPtr<nsIFile> file;
+  nsresult rv = NS_NewLocalFile(*mSnapshotPath, false, getter_AddRefs(file));
+
+  delete mSnapshotPath;
+  mSnapshotPath = NULL;
+
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  NS_ADDREF(*aFile = new nsDOMFileFile(file));
+
+  return NS_OK;
+}
+
+/**
+ * Initialization and Shutdown functions for the video source, called by the
+ * constructor and destructor respectively.
+ */
+
+void
+MediaEngineWebRTCVideoSource::Init()
+{
+  if (mVideoEngine == NULL) {
+    return;
+  }
+
+  mViEBase = webrtc::ViEBase::GetInterface(mVideoEngine);
+  if (mViEBase == NULL) {
+    return;
+  }
+
+  // Get the capture and render interfaces; that's all we need for now.
+  mViECapture = webrtc::ViECapture::GetInterface(mVideoEngine);
+  mViERender = webrtc::ViERender::GetInterface(mVideoEngine);
+
+  if (mViECapture == NULL || mViERender == NULL) {
+    return;
+  }
+
+  mInitDone = true;
+}
+
+void
+MediaEngineWebRTCVideoSource::Shutdown()
+{
+  bool continueShutdown = false;
+
+  if (!mInitDone) {
+    return;
+  }
+
+  if (mState == kStarted) {
+    mViERender->StopRender(mCapIndex);
+    mViERender->RemoveRenderer(mCapIndex);
+    continueShutdown = true;
+  }
+
+  if (mState == kAllocated || continueShutdown) {
+    mViECapture->StopCapture(mCapIndex);
+    mViECapture->ReleaseCaptureDevice(mCapIndex);
+    continueShutdown = false;
+  }
+
+  mViECapture->Release();
+  mViERender->Release();
+  mViEBase->Release();
+  mState = kReleased;
+  mInitDone = false;
+}
+
+}