Bug 1106958 - Use android.media.MediaCodec for decoding in WebRTC stack. r=snorp, r=gcp, r=ted
authorQiang Lu <qiang.lu@intel.com>
Wed, 10 Dec 2014 11:06:06 +0800
changeset 240974 ea790a14bc114388895a1527c6e78d7ff4633e13
parent 240973 98b0e8e0c1800f4771dc2629adf769327b3be965
child 240975 a8963c3c2c56476b6a49a08b15cc564854782113
build/gyp.mozbuild
media/webrtc/moz.build
media/webrtc/signaling/signaling.gyp
media/webrtc/signaling/src/media-conduit/MediaCodecVideoCodec.cpp
media/webrtc/signaling/src/media-conduit/MediaCodecVideoCodec.h
media/webrtc/signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.cpp
media/webrtc/signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.h
media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
--- a/build/gyp.mozbuild
+++ b/build/gyp.mozbuild
@@ -39,16 +39,17 @@ gyp_vars = {
     'use_glib': 1 if CONFIG['GLIB_LIBS'] else 0,
 
      # turn off mandatory use of NEON and instead use NEON detection
     'arm_neon': 0,
     'arm_neon_optional': 1,
 
     'moz_widget_toolkit_gonk': 0,
     'moz_webrtc_omx': 0,
+    'moz_webrtc_mediacodec': 0,
 
     # (for vp8) chromium sets to 0 also
     'use_temporal_layers': 0,
 
     # Creates AEC internal sample dump files in current directory
     'aec_debug_dump': 1,
 
     # Enable and force use of hardware AEC
@@ -75,16 +76,17 @@ elif os == 'Android':
         gyp_vars['build_with_gonk'] = 1
         gyp_vars['moz_widget_toolkit_gonk'] = 1
         gyp_vars['opus_complexity'] = 1
         if int(CONFIG['ANDROID_VERSION']) >= 18:
           gyp_vars['moz_webrtc_omx'] = 1
     else:
         gyp_vars.update(
             gtest_target_type='executable',
+            moz_webrtc_mediacodec=1,
             android_toolchain=CONFIG['ANDROID_TOOLCHAIN'],
         )
 
 flavors = {
     'WINNT': 'win',
     'Android': 'linux' if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gonk' else 'android',
     'Linux': 'linux',
     'Darwin': 'mac' if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa' else 'ios',
--- a/media/webrtc/moz.build
+++ b/media/webrtc/moz.build
@@ -53,17 +53,19 @@ if CONFIG['MOZ_WEBRTC_SIGNALING']:
     )
     GYP_DIRS['signaling'].sandbox_vars['FINAL_LIBRARY'] = 'xul'
     # Excluded for various symbol conflicts
     signaling_non_unified_sources = [
         'signaling/src/common/browser_logging/CSFLog.cpp',
         'signaling/src/jsep/JsepSessionImpl.cpp',
         'signaling/src/media-conduit/AudioConduit.cpp',
         'signaling/src/media-conduit/CodecStatistics.cpp',
+        'signaling/src/media-conduit/MediaCodecVideoCodec.cpp',
         'signaling/src/media-conduit/VideoConduit.cpp',
+        'signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.cpp',
         'signaling/src/mediapipeline/MediaPipeline.cpp',
         'signaling/src/mediapipeline/MediaPipelineFilter.cpp',
         'signaling/src/mediapipeline/SrtpFlow.cpp',
         'signaling/src/peerconnection/MediaPipelineFactory.cpp',
         'signaling/src/peerconnection/MediaStreamList.cpp',
         'signaling/src/peerconnection/PeerConnectionCtx.cpp',
         'signaling/src/peerconnection/PeerConnectionImpl.cpp',
         'signaling/src/peerconnection/PeerConnectionMedia.cpp',
@@ -89,16 +91,17 @@ if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'gonk
     GYP_DIRS['trunk/testing'].non_unified_sources += webrtc_non_unified_sources
 
     if CONFIG['MOZ_WEBRTC_SIGNALING']:
         GYP_DIRS += ['signalingtest']
         GYP_DIRS['signalingtest'].input = 'signaling/signaling.gyp'
         GYP_DIRS['signalingtest'].variables = gyp_vars.copy()
         GYP_DIRS['signalingtest'].variables.update(
             build_for_test=1,
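+            # MediaCodec needs the Android Java/JNI runtime, which the
+            # standalone signaling test harness presumably lacks, so force
+            # it off here.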
+            moz_webrtc_mediacodec=0,
             build_for_standalone=0
         )
         GYP_DIRS['signalingtest'].non_unified_sources += signaling_non_unified_sources
         GYP_DIRS += ['signalingstandalone']
         GYP_DIRS['signalingstandalone'].input = 'signaling/signaling.gyp'
         GYP_DIRS['signalingstandalone'].variables = gyp_vars.copy()
         GYP_DIRS['signalingstandalone'].variables.update(
             build_for_test=0,
--- a/media/webrtc/signaling/signaling.gyp
+++ b/media/webrtc/signaling/signaling.gyp
@@ -211,16 +211,30 @@
             '-I$(ANDROID_SOURCE)/frameworks/native/include/media/openmax',
             '-I$(ANDROID_SOURCE)/frameworks/native/include',
             '-I$(ANDROID_SOURCE)/frameworks/native/opengl/include',
           ],
           'defines' : [
             'MOZ_WEBRTC_OMX'
           ],
         }],
+        ['moz_webrtc_mediacodec==1', {
+          'include_dirs': [
+            '../../../widget/android',
+          ],
+          'sources': [
+            './src/media-conduit/MediaCodecVideoCodec.h',
+            './src/media-conduit/WebrtcMediaCodecVP8VideoCodec.h',
+            './src/media-conduit/MediaCodecVideoCodec.cpp',
+            './src/media-conduit/WebrtcMediaCodecVP8VideoCodec.cpp',
+          ],
+          'defines' : [
+            'MOZ_WEBRTC_MEDIACODEC',
+          ],
+        }],
         ['(build_for_test==0) and (build_for_standalone==0)', {
           'defines' : [
             'MOZILLA_INTERNAL_API',
           ],
           'sources': [
             './src/peerconnection/MediaStreamList.cpp',
             './src/peerconnection/MediaStreamList.h',
             './src/peerconnection/WebrtcGlobalInformation.cpp',
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/media-conduit/MediaCodecVideoCodec.cpp
@@ -0,0 +1,31 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "CSFLog.h"
+#include "nspr.h"
+
+#include "WebrtcMediaCodecVP8VideoCodec.h"
+#include "MediaCodecVideoCodec.h"
+
+namespace mozilla {
+
+static const char* logTag = "MediaCodecVideoCodec";
+
+VideoEncoder* MediaCodecVideoCodec::CreateEncoder(CodecType aCodecType) {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+  if (aCodecType == CODEC_VP8) {
+     return new WebrtcMediaCodecVP8VideoEncoder();
+  }
+  return nullptr;
+}
+
+VideoDecoder* MediaCodecVideoCodec::CreateDecoder(CodecType aCodecType) {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+  if (aCodecType == CODEC_VP8) {
+    return new WebrtcMediaCodecVP8VideoDecoder();
+  }
+  return nullptr;
+}
+
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/media-conduit/MediaCodecVideoCodec.h
@@ -0,0 +1,31 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MediaCodecVideoCodec_h__
+#define MediaCodecVideoCodec_h__
+
+#include "MediaConduitInterface.h"
+
+namespace mozilla {
+class MediaCodecVideoCodec {
+ public:
+ enum CodecType {
+    CODEC_VP8,
+  };
+  /**
+   * Create encoder object for codec type |aCodecType|. Return |nullptr| when
+   * failed.
+   */
+  static VideoEncoder* CreateEncoder(CodecType aCodecType);
+
+  /**
+   * Create decoder object for codec type |aCodecType|. Return |nullptr| when
+   * failed.
+   */
+  static VideoDecoder* CreateDecoder(CodecType aCodecType);
+};
+
+}
+
+#endif // MediaCodecVideoCodec_h__
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.cpp
@@ -0,0 +1,1004 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <cstdio>
+#include <iostream>
+#include <queue>
+
+#include "CSFLog.h"
+#include "nspr.h"
+
+#include "MediaCodec.h"
+#include "WebrtcMediaCodecVP8VideoCodec.h"
+#include "AndroidJNIWrapper.h"
+#include "mozilla/Scoped.h"
+#include "mozilla/ArrayUtils.h"
+#include "nsThreadUtils.h"
+#include "mozilla/Monitor.h"
+#include "runnable_utils.h"
+
+#include "AudioConduit.h"
+#include "VideoConduit.h"
+#include "libyuv/convert_from.h"
+#include "libyuv/convert.h"
+#include "libyuv/row.h"
+
+#include <webrtc/common_video/libyuv/include/webrtc_libyuv.h>
+
+using namespace mozilla;
+using namespace mozilla::widget::sdk;
+
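+// MediaCodec dequeue timeouts are in microseconds (the unit used by
+// android.media.MediaCodec.dequeueInputBuffer()/dequeueOutputBuffer()).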
+static const int32_t DECODER_TIMEOUT = 10 * PR_USEC_PER_MSEC; // 10ms
+static const char MEDIACODEC_VIDEO_MIME_VP8[] = "video/x-vnd.on2.vp8";
+
+namespace mozilla {
+
+static const char* logTag = "WebrtcMediaCodecVP8VideoCodec";
+
+static MediaCodec::LocalRef CreateDecoder(const char* aMimeType)
+{
+  if (!aMimeType) {
+    return nullptr;
+  }
+
+  MediaCodec::LocalRef codec;
+  MediaCodec::CreateDecoderByType(aMimeType, &codec);
+  return codec;
+}
+
+static MediaCodec::LocalRef CreateEncoder(const char* aMimeType)
+{
+  if (!aMimeType) {
+    return nullptr;
+  }
+
+  MediaCodec::LocalRef codec;
+  MediaCodec::CreateEncoderByType(aMimeType, &codec);
+  return codec;
+}
+
+static void
+ShutdownThread(nsCOMPtr<nsIThread>& aThread)
+{
+  aThread->Shutdown();
+}
+
+// Base runnable class that repeatedly pulls MediaCodec output buffers on a
+// separate thread. How to use:
+// - Implement DrainOutput() to collect output. Return false to tell the
+//   drain loop not to pop the input queue.
+// - Call QueueInput() to schedule a run that drains output. The input,
+//   aFrame, should contain corresponding info such as image size and
+//   timestamps, which the DrainOutput() implementation uses to construct
+//   the data needed by the encoded/decoded callbacks.
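+//
+// A minimal sketch of a subclass (names here are hypothetical; see
+// OutputDrain further down for the real one):
+//
+//   class MyDrain : public MediaCodecOutputDrain {
+//    protected:
+//     bool DrainOutput() override {
+//       // Dequeue one output buffer and, on success, pop the matching
+//       // entry from mInputFrames while holding mMonitor.
+//       return mCodec->DrainOutput(mInputFrames, mMonitor) == NS_OK;
+//     }
+//    private:
+//     WebrtcAndroidMediaCodec* mCodec;
+//   };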
+class MediaCodecOutputDrain : public nsRunnable
+{
+public:
+  void Start() {
+    MonitorAutoLock lock(mMonitor);
+    if (mThread == nullptr) {
+      NS_NewNamedThread("OutputDrain", getter_AddRefs(mThread));
+    }
+    mEnding = false;
+    mThread->Dispatch(this, NS_DISPATCH_NORMAL);
+  }
+
+  void Stop() {
+    MonitorAutoLock lock(mMonitor);
+    mEnding = true;
+    lock.NotifyAll(); // In case Run() is waiting.
+
+    if (mThread != nullptr) {
+      MonitorAutoUnlock unlock(mMonitor);
+      NS_DispatchToMainThread(
+        WrapRunnableNM<decltype(&ShutdownThread),
+                       nsCOMPtr<nsIThread> >(&ShutdownThread, mThread));
+      mThread = nullptr;
+    }
+  }
+
+  void QueueInput(const EncodedFrame& aFrame)
+  {
+    MonitorAutoLock lock(mMonitor);
+
+    MOZ_ASSERT(mThread);
+
+    mInputFrames.push(aFrame);
+    // Notify Run() about queued input and it can start working.
+    lock.NotifyAll();
+  }
+
+  NS_IMETHODIMP Run() override
+  {
+    MOZ_ASSERT(mThread);
+
+    MonitorAutoLock lock(mMonitor);
+    while (true) {
+      if (mInputFrames.empty()) {
+        // Wait for new input.
+        lock.Wait();
+      }
+
+      if (mEnding) {
+        // Stop draining.
+        break;
+      }
+
+      MOZ_ASSERT(!mInputFrames.empty());
+      {
+        // Release monitor while draining because it's blocking.
+        MonitorAutoUnlock unlock(mMonitor);
+        DrainOutput();
+      }
+    }
+
+    return NS_OK;
+  }
+
+protected:
+  MediaCodecOutputDrain()
+    : mMonitor("MediaCodecOutputDrain monitor")
+    , mEnding(false)
+  {}
+
+  // Drain output buffer for input frame queue mInputFrames.
+  // mInputFrames contains info such as size and time of the input frames.
+  // We have to give a queue to handle encoder frame skips - we can input 10
+  // frames and get one back.  NOTE: any access of mInputFrames MUST be
+  // preceded by locking mMonitor!
+
+  // Blocks waiting for decoded buffers, but for a limited period because
+  // we need to check for shutdown.
+  virtual bool DrainOutput() = 0;
+
+protected:
+  // This monitor protects all things below it, and is also used to
+  // wait/notify queued input.
+  Monitor mMonitor;
+  std::queue<EncodedFrame> mInputFrames;
+
+private:
+  // also protected by mMonitor
+  nsCOMPtr<nsIThread> mThread;
+  bool mEnding;
+};
+
+class WebrtcAndroidMediaCodec {
+public:
+  WebrtcAndroidMediaCodec()
+    : mEncoderCallback(nullptr)
+    , mDecoderCallback(nullptr)
+    , isStarted(false)
+    , mEnding(false) {
+    CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+  }
+
+  virtual ~WebrtcAndroidMediaCodec() {
+  }
+
+  nsresult Configure(uint32_t width,
+                     uint32_t height,
+                     const jobject aSurface,
+                     uint32_t flags,
+                     const char* mime,
+                     bool encoder) {
+    CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+    nsresult res = NS_OK;
+
+    if (!mCoder) {
+      mWidth = width;
+      mHeight = height;
+
+      MediaFormat::LocalRef format;
+
+      res = MediaFormat::CreateVideoFormat(nsCString(mime),
+                                     mWidth,
+                                     mHeight,
+                                     &format);
+
+      if (NS_FAILED(res)) {
+        CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateVideoFormat failed err = %d", __FUNCTION__, res);
+        return NS_ERROR_FAILURE;
+      }
+
+      if (encoder) {
+        mCoder = CreateEncoder(mime);
+
+        if (!mCoder) {
+          CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateEncoderByType failed", __FUNCTION__);
+          return NS_ERROR_FAILURE;
+        }
+
+        res = format->SetInteger(nsCString("bitrate"), 1000*300);
+        res = format->SetInteger(nsCString("bitrate-mode"), 2);
+        res = format->SetInteger(nsCString("color-format"), 21);
+        res = format->SetInteger(nsCString("frame-rate"), 30);
+        res = format->SetInteger(nsCString("i-frame-interval"), 100);
+
+      } else {
+        mCoder = CreateDecoder(mime);
+        if (!mCoder) {
+          CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, CreateDecoderByType failed", __FUNCTION__);
+          return NS_ERROR_FAILURE;
+        }
+      }
+      res = mCoder->Configure(format, nullptr, nullptr, flags);
+      if (NS_FAILED(res)) {
+        CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, err = %d", __FUNCTION__, res);
+      }
+    }
+
+    return res;
+  }
+
+  nsresult Start() {
+    CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+
+    if (!mCoder) {
+      return NS_ERROR_FAILURE;
+    }
+
+    mEnding = false;
+
+    nsresult res;
+    res = mCoder->Start();
+    if (NS_FAILED(res)) {
+      CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->start() return err = %d",
+                  __FUNCTION__, res);
+      return res;
+    }
+    isStarted = true;
+    return NS_OK;
+  }
+
+  nsresult Stop() {
+    CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+    mEnding = true;
+
+    if (mOutputDrain != nullptr) {
+      mOutputDrain->Stop();
+      mOutputDrain = nullptr;
+    }
+
+    mCoder->Stop();
+    mCoder->Release();
+    isStarted = false;
+    return NS_OK;
+  }
+
+  void GenerateVideoFrame(
+      size_t width, size_t height, uint32_t timeStamp,
+      void* decoded,
+      webrtc::I420VideoFrame* videoFrame, int color_format) {
+
+    CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+
+    // TODO: eliminate extra pixel copy/color conversion
+    size_t widthUV = (width + 1) / 2;
+    if (videoFrame->CreateEmptyFrame(width, height, width, widthUV, widthUV)) {
+      return;
+    }
+
+    uint8_t* src_nv12 = static_cast<uint8_t *>(decoded);
+    int src_nv12_y_size = width * height;
+
+    uint8_t* dstY = videoFrame->buffer(webrtc::kYPlane);
+    uint8_t* dstU = videoFrame->buffer(webrtc::kUPlane);
+    uint8_t* dstV = videoFrame->buffer(webrtc::kVPlane);
+
+    libyuv::NV12ToI420(src_nv12, width,
+                       src_nv12 + src_nv12_y_size, (width + 1) & ~1,
+                       dstY, width,
+                       dstU, (width + 1) / 2,
+                       dstV,
+                       (width + 1) / 2,
+                       width, height);
+
+    videoFrame->set_timestamp(timeStamp);
+  }
+
+  int32_t
+  FeedMediaCodecInput(
+      const webrtc::EncodedImage& inputImage,
+      int64_t renderTimeMs) {
+
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+    uint32_t time = PR_IntervalNow();
+    CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+#endif
+
+    int inputIndex = DequeueInputBuffer(DECODER_TIMEOUT);
+    if (inputIndex == -1) {
+      CSFLogError(logTag,  "%s equeue input buffer failed", __FUNCTION__);
+      return inputIndex;
+    }
+
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+    CSFLogDebug(logTag,  "%s dequeue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+    time = PR_IntervalNow();
+#endif
+
+    size_t size = inputImage._length;
+
+    JNIEnv* env = jsjni_GetJNIForThread();
+    jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
+    void* directBuffer = env->GetDirectBufferAddress(buffer);
+
+    PodCopy((uint8_t*)directBuffer, inputImage._buffer, size);
+
+    if (inputIndex >= 0) {
+      CSFLogError(logTag,  "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
+      QueueInputBuffer(inputIndex, 0, size, renderTimeMs, 0);
+
+      {
+        if (mOutputDrain == nullptr) {
+          mOutputDrain = new OutputDrain(this);
+          mOutputDrain->Start();
+        }
+        EncodedFrame frame;
+        frame.width_ = mWidth;
+        frame.height_ = mHeight;
+        frame.timeStamp_ = inputImage._timeStamp;
+        frame.decode_timestamp_ = renderTimeMs;
+        mOutputDrain->QueueInput(frame);
+      }
+      env->DeleteLocalRef(buffer);
+    }
+
+    return inputIndex;
+  }
+
+  nsresult
+  DrainOutput(std::queue<EncodedFrame>& aInputFrames, Monitor& aMonitor) {
+    MOZ_ASSERT(mCoder != nullptr);
+    if (mCoder == nullptr) {
+      return NS_ERROR_FAILURE;
+    }
+
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+    uint32_t time = PR_IntervalNow();
+#endif
+    nsresult res;
+    BufferInfo::LocalRef bufferInfo;
+    res = BufferInfo::New(&bufferInfo);
+    if (NS_FAILED(res)) {
+      CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, BufferInfo::New return err = %d",
+                  __FUNCTION__, res);
+      return res;
+    }
+    int32_t outputIndex = DequeueOutputBuffer(bufferInfo);
+
+    if (outputIndex == MediaCodec::INFO_TRY_AGAIN_LATER) {
+      // Not an error: output not available yet. Try later.
+      CSFLogDebug(logTag,  "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
+    } else if (outputIndex == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
+      // handle format change
+      CSFLogDebug(logTag,  "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
+    } else if (outputIndex == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
+      CSFLogDebug(logTag,  "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
+      GetOutputBuffers();
+    } else if (outputIndex < 0) {
+      CSFLogDebug(logTag,  "%s dequeue output buffer unknow error:%d", __FUNCTION__, outputIndex);
+      MonitorAutoLock lock(aMonitor);
+      aInputFrames.pop();
+    } else {
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+      CSFLogDebug(logTag,  "%s dequeue output buffer# return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+#endif
+      EncodedFrame frame;
+      {
+        MonitorAutoLock lock(aMonitor);
+        frame = aInputFrames.front();
+        aInputFrames.pop();
+      }
+
+      if (mEnding) {
+        ReleaseOutputBuffer(outputIndex, false);
+        return NS_OK;
+      }
+
+      JNIEnv* env = jsjni_GetJNIForThread();
+      jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputIndex);
+      if (buffer) {
+        // The buffer will be null on Android L if we are decoding to a Surface
+        void* directBuffer = env->GetDirectBufferAddress(buffer);
+
+        int color_format = 0;
+
+        CSFLogDebug(logTag,  "%s generate video frame, width = %d, height = %d, timeStamp_ = %d", __FUNCTION__, frame.width_, frame.height_, frame.timeStamp_);
+        GenerateVideoFrame(frame.width_, frame.height_, frame.timeStamp_, directBuffer, &mVideoFrame, color_format);
+        mDecoderCallback->Decoded(mVideoFrame);
+
+        ReleaseOutputBuffer(outputIndex, false);
+        env->DeleteLocalRef(buffer);
+      }
+    }
+    return NS_OK;
+  }
+
+  int32_t DequeueInputBuffer(int64_t time) {
+    nsresult res;
+    int32_t inputIndex;
+    res = mCoder->DequeueInputBuffer(time, &inputIndex);
+
+    if (NS_FAILED(res)) {
+      CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueInputBuffer() return err = %d",
+                  __FUNCTION__, res);
+      return -1;
+    }
+    return inputIndex;
+  }
+
+  void QueueInputBuffer(int32_t inputIndex, int32_t offset, size_t size, int64_t renderTimes, int32_t flags) {
+    nsresult res = NS_OK;
+    res = mCoder->QueueInputBuffer(inputIndex, offset, size, renderTimes, flags);
+
+    if (NS_FAILED(res)) {
+      CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->QueueInputBuffer() return err = %d",
+                  __FUNCTION__, res);
+    }
+  }
+
+  int32_t DequeueOutputBuffer(BufferInfo::Param aInfo) {
+    nsresult res;
+
+    int32_t outputStatus;
+    res = mCoder->DequeueOutputBuffer(aInfo, DECODER_TIMEOUT, &outputStatus);
+
+    if (NS_FAILED(res)) {
+      CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, mCoder->DequeueOutputBuffer() return err = %d",
+                  __FUNCTION__, res);
+      return -1;
+    }
+
+    return outputStatus;
+  }
+
+  void ReleaseOutputBuffer(int32_t index, bool flag) {
+    mCoder->ReleaseOutputBuffer(index, flag);
+  }
+
+  jobjectArray GetInputBuffers() {
+    JNIEnv* env = jsjni_GetJNIForThread();
+
+    if (mInputBuffers) {
+      env->DeleteGlobalRef(mInputBuffers);
+    }
+
+    nsresult res;
+    jni::ObjectArray::LocalRef inputBuffers;
+    res = mCoder->GetInputBuffers(&inputBuffers);
+    mInputBuffers = (jobjectArray) env->NewGlobalRef(inputBuffers.Get());
+    if (NS_FAILED(res)) {
+      CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, GetInputBuffers return err = %d",
+                  __FUNCTION__, res);
+      return nullptr;
+    }
+
+    return mInputBuffers;
+  }
+
+  jobjectArray GetOutputBuffers() {
+    JNIEnv* env = jsjni_GetJNIForThread();
+
+    if (mOutputBuffers) {
+      env->DeleteGlobalRef(mOutputBuffers);
+    }
+
+    nsresult res;
+    jni::ObjectArray::LocalRef outputBuffers;
+    res = mCoder->GetOutputBuffers(&outputBuffers);
+    mOutputBuffers = (jobjectArray) env->NewGlobalRef(outputBuffers.Get());
+    if (NS_FAILED(res)) {
+      CSFLogDebug(logTag, "WebrtcAndroidMediaCodec::%s, GetOutputBuffers return err = %d",
+                  __FUNCTION__, res);
+      return nullptr;
+    }
+
+    return mOutputBuffers;
+  }
+
+  void SetDecoderCallback(webrtc::DecodedImageCallback* aCallback) {
+    mDecoderCallback = aCallback;
+  }
+
+  void SetEncoderCallback(webrtc::EncodedImageCallback* aCallback) {
+    mEncoderCallback = aCallback;
+  }
+
+private:
+  class OutputDrain : public MediaCodecOutputDrain
+  {
+  public:
+    OutputDrain(WebrtcAndroidMediaCodec* aMediaCodec)
+      : MediaCodecOutputDrain()
+      , mMediaCodec(aMediaCodec)
+    {}
+
+  protected:
+    virtual bool DrainOutput() override
+    {
+      return (mMediaCodec->DrainOutput(mInputFrames, mMonitor) == NS_OK);
+    }
+
+  private:
+    WebrtcAndroidMediaCodec* mMediaCodec;
+  };
+
+  friend class WebrtcMediaCodecVP8VideoEncoder;
+  friend class WebrtcMediaCodecVP8VideoDecoder;
+
+  MediaCodec::GlobalRef mCoder;
+  webrtc::EncodedImageCallback* mEncoderCallback;
+  webrtc::DecodedImageCallback* mDecoderCallback;
+  webrtc::I420VideoFrame mVideoFrame;
+
+  jobjectArray mInputBuffers;
+  jobjectArray mOutputBuffers;
+
+  RefPtr<OutputDrain> mOutputDrain;
+  uint32_t mWidth;
+  uint32_t mHeight;
+  bool isStarted;
+  bool mEnding;
+
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcAndroidMediaCodec)
+};
+
+static bool I420toNV12(uint8_t* dstY, uint16_t* dstUV, const webrtc::I420VideoFrame& inputImage) {
+  uint8_t* buffer = dstY;
+  uint8_t* dst_y = buffer;
+  int dst_stride_y = inputImage.stride(webrtc::kYPlane);
+  uint8_t* dst_uv = buffer + inputImage.stride(webrtc::kYPlane) *
+                    inputImage.height();
+  int dst_stride_uv = inputImage.stride(webrtc::kUPlane) * 2;
+
+  // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR: most hardware is
+  // NV12-friendly. NV12 is the full-size Y plane followed by a half-height
+  // plane of interleaved U/V samples.
+  bool converted = !libyuv::I420ToNV12(inputImage.buffer(webrtc::kYPlane),
+                                       inputImage.stride(webrtc::kYPlane),
+                                       inputImage.buffer(webrtc::kUPlane),
+                                       inputImage.stride(webrtc::kUPlane),
+                                       inputImage.buffer(webrtc::kVPlane),
+                                       inputImage.stride(webrtc::kVPlane),
+                                       dst_y,
+                                       dst_stride_y,
+                                       dst_uv,
+                                       dst_stride_uv,
+                                       inputImage.width(),
+                                       inputImage.height());
+  return converted;
+}
+
+// Encoder.
+WebrtcMediaCodecVP8VideoEncoder::WebrtcMediaCodecVP8VideoEncoder()
+  : mTimestamp(0)
+  , mCallback(nullptr)
+  , mMediaCodecEncoder(nullptr) {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+
+  memset(&mEncodedImage, 0, sizeof(mEncodedImage));
+}
+
+bool WebrtcMediaCodecVP8VideoEncoder::ResetInputBuffers() {
+  mInputBuffers = mMediaCodecEncoder->GetInputBuffers();
+
+  if (!mInputBuffers)
+    return false;
+
+  return true;
+}
+
+bool WebrtcMediaCodecVP8VideoEncoder::ResetOutputBuffers() {
+  mOutputBuffers = mMediaCodecEncoder->GetOutputBuffers();
+
+  if (!mOutputBuffers)
+    return false;
+
+  return true;
+}
+
+int32_t
+WebrtcMediaCodecVP8VideoEncoder::VerifyAndAllocate(const uint32_t minimumSize)
+{
+  if (minimumSize > mEncodedImage._size) {
+    // Create a buffer of sufficient size.
+    uint8_t* newBuffer = new uint8_t[minimumSize];
+    if (newBuffer == nullptr) {
+      return -1;
+    }
+    if (mEncodedImage._buffer) {
+      // Copy the old data.
+      memcpy(newBuffer, mEncodedImage._buffer, mEncodedImage._size);
+      delete [] mEncodedImage._buffer;
+    }
+    mEncodedImage._buffer = newBuffer;
+    mEncodedImage._size = minimumSize;
+  }
+  return 0;
+}
+
+int32_t WebrtcMediaCodecVP8VideoEncoder::InitEncode(
+    const webrtc::VideoCodec* codecSettings,
+    int32_t numberOfCores,
+    uint32_t maxPayloadSize) {
+  mMaxPayloadSize = maxPayloadSize;
+  CSFLogDebug(logTag,  "%s, w = %d, h = %d", __FUNCTION__, codecSettings->width, codecSettings->height);
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
+    const webrtc::I420VideoFrame& inputImage,
+    const webrtc::CodecSpecificInfo* codecSpecificInfo,
+    const std::vector<webrtc::VideoFrameType>* frame_types) {
+  CSFLogDebug(logTag,  "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(), inputImage.height());
+
+  if (!mMediaCodecEncoder) {
+    mMediaCodecEncoder = new WebrtcAndroidMediaCodec();
+  }
+
+  if (!mMediaCodecEncoder->isStarted) {
+    if (inputImage.width() == 0 || inputImage.height() == 0) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    } else {
+      mFrameWidth = inputImage.width();
+      mFrameHeight = inputImage.height();
+    }
+
+    mMediaCodecEncoder->SetEncoderCallback(mCallback);
+    nsresult res = mMediaCodecEncoder->Configure(mFrameWidth, mFrameHeight, nullptr, MediaCodec::CONFIGURE_FLAG_ENCODE, MEDIACODEC_VIDEO_MIME_VP8, true /* encoder */);
+
+    if (res != NS_OK) {
+      CSFLogDebug(logTag,  "%s, encoder configure return err = %d",
+                  __FUNCTION__, res);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    res = mMediaCodecEncoder->Start();
+
+    if (NS_FAILED(res)) {
+      mMediaCodecEncoder->isStarted = false;
+      CSFLogDebug(logTag,  "%s start encoder. err = %d", __FUNCTION__, res);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    bool retBool = ResetInputBuffers();
+    if (!retBool) {
+      CSFLogDebug(logTag,  "%s ResetInputBuffers failed.", __FUNCTION__);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    retBool = ResetOutputBuffers();
+    if (!retBool) {
+      CSFLogDebug(logTag,  "%s ResetOutputBuffers failed.", __FUNCTION__);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    mMediaCodecEncoder->isStarted = true;
+  }
+
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+  uint32_t time = PR_IntervalNow();
+#endif
+
+  size_t sizeY = inputImage.allocated_size(webrtc::kYPlane);
+  size_t sizeUV = inputImage.allocated_size(webrtc::kUPlane);
+  size_t size = sizeY + 2 * sizeUV;
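+  // For example, a 640x360 frame gives sizeY = 230400 and sizeUV = 57600,
+  // so the NV12 input buffer needs 345600 bytes (width * height * 3 / 2).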
+
+  int inputIndex = mMediaCodecEncoder->DequeueInputBuffer(DECODER_TIMEOUT);
+  if (inputIndex == -1) {
+    CSFLogError(logTag,  "%s dequeue input buffer failed", __FUNCTION__);
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+  CSFLogDebug(logTag,  "%s WebrtcMediaCodecVP8VideoEncoder::Encode() dequeue OMX input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+#endif
+
+  if (inputIndex >= 0) {
+    JNIEnv* env = jsjni_GetJNIForThread();
+    jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
+    void* directBuffer = env->GetDirectBufferAddress(buffer);
+
+    uint8_t* dstY = static_cast<uint8_t*>(directBuffer);
+    uint16_t* dstUV = reinterpret_cast<uint16_t*>(dstY + sizeY);
+
+    bool converted = I420toNV12(dstY, dstUV, inputImage);
+    if (!converted) {
+      CSFLogError(logTag,  "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input buffer to NV12 error.", __FUNCTION__);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    env->DeleteLocalRef(buffer);
+
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+    time = PR_IntervalNow();
+    CSFLogError(logTag,  "%s queue input buffer inputIndex = %d", __FUNCTION__, inputIndex);
+#endif
+
+    mMediaCodecEncoder->QueueInputBuffer(inputIndex, 0, size, inputImage.render_time_ms() * PR_USEC_PER_MSEC /* ms to us */, 0);
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+    CSFLogDebug(logTag,  "%s WebrtcMediaCodecVP8VideoEncoder::Encode() queue input buffer took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+#endif
+    mEncodedImage._encodedWidth = inputImage.width();
+    mEncodedImage._encodedHeight = inputImage.height();
+    mEncodedImage._timeStamp = inputImage.timestamp();
+    mEncodedImage.capture_time_ms_ = inputImage.timestamp();
+
+    nsresult res;
+    BufferInfo::LocalRef bufferInfo;
+    res = BufferInfo::New(&bufferInfo);
+    if (NS_FAILED(res)) {
+      CSFLogDebug(logTag, "WebrtcMediaCodecVP8VideoEncoder::%s, BufferInfo::New return err = %d",
+                  __FUNCTION__, res);
+      return -1;
+    }
+
+    int32_t outputIndex = mMediaCodecEncoder->DequeueOutputBuffer(bufferInfo);
+
+    if (outputIndex == MediaCodec::INFO_TRY_AGAIN_LATER) {
+      // Not an error: output not available yet. Try later.
+      CSFLogDebug(logTag,  "%s dequeue output buffer try again:%d", __FUNCTION__, outputIndex);
+    } else if (outputIndex == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
+      // handle format change
+      CSFLogDebug(logTag,  "%s dequeue output buffer format changed:%d", __FUNCTION__, outputIndex);
+    } else if (outputIndex == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
+      CSFLogDebug(logTag,  "%s dequeue output buffer changed:%d", __FUNCTION__, outputIndex);
+      mMediaCodecEncoder->GetOutputBuffers();
+    } else if (outputIndex < 0) {
+      CSFLogDebug(logTag,  "%s dequeue output buffer unknow error:%d", __FUNCTION__, outputIndex);
+    } else {
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+      CSFLogDebug(logTag,  "%s dequeue output buffer return status is %d took %u ms", __FUNCTION__, outputIndex, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+#endif
+
+      JNIEnv* env = jsjni_GetJNIForThread();
+      jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputIndex);
+      if (buffer) {
+        int32_t offset;
+        bufferInfo->Offset(&offset);
+        int32_t flags;
+        bufferInfo->Flags(&flags);
+
+        // The buffer will be null on Android L if we are decoding to a Surface
+        void* directBuffer = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer)) + offset;
+
+        // BUFFER_FLAG_SYNC_FRAME is one bit in the flags bitmask.
+        if (flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME) {
+          mEncodedImage._frameType = webrtc::kKeyFrame;
+        } else {
+          mEncodedImage._frameType = webrtc::kDeltaFrame;
+        }
+        mEncodedImage._completeFrame = true;
+
+        int32_t size;
+        bufferInfo->Size(&size);
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+        CSFLogDebug(logTag,  "%s dequeue output buffer ok, index:%d, buffer size = %d, buffer offset = %d, flags = %d", __FUNCTION__, outputIndex, size, offset, flags);
+#endif
+
+        if(VerifyAndAllocate(size) == -1) {
+          CSFLogDebug(logTag,  "%s VerifyAndAllocate buffers failed", __FUNCTION__);
+          return WEBRTC_VIDEO_CODEC_ERROR;
+        }
+
+        mEncodedImage._length = size;
+
+        // XXX It's too bad the MediaCodec API forces us to memcpy this...
+        // We should find a way to 'hold' the buffer or transfer it from
+        // inputImage (ping-pong buffers, or select them from a small pool).
+        memcpy(mEncodedImage._buffer, directBuffer, mEncodedImage._length);
+
+        webrtc::CodecSpecificInfo info;
+        info.codecType = webrtc::kVideoCodecVP8;
+        info.codecSpecific.VP8.pictureId = -1;
+        info.codecSpecific.VP8.tl0PicIdx = -1;
+        info.codecSpecific.VP8.keyIdx = -1;
+        info.codecSpecific.VP8.temporalIdx = 1;
+
+        // Generate a header describing a single fragment.
+        webrtc::RTPFragmentationHeader header;
+        memset(&header, 0, sizeof(header));
+        header.VerifyAndAllocateFragmentationHeader(1);
+        header.fragmentationLength[0] = mEncodedImage._length;
+
+        mCallback->Encoded(mEncodedImage, &info, &header);
+
+        mMediaCodecEncoder->ReleaseOutputBuffer(outputIndex, false);
+        env->DeleteLocalRef(buffer);
+      }
+    }
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t WebrtcMediaCodecVP8VideoEncoder::RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* callback) {
+  CSFLogDebug(logTag, "%s ", __FUNCTION__);
+  mCallback = callback;
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t WebrtcMediaCodecVP8VideoEncoder::Release() {
+  CSFLogDebug(logTag, "%s ", __FUNCTION__);
+  delete mMediaCodecEncoder;
+  mMediaCodecEncoder = nullptr;
+
+  delete [] mEncodedImage._buffer;
+  mEncodedImage._buffer = nullptr;
+  mEncodedImage._size = 0;
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebrtcMediaCodecVP8VideoEncoder::~WebrtcMediaCodecVP8VideoEncoder() {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+  Release();
+}
+
+int32_t WebrtcMediaCodecVP8VideoEncoder::SetChannelParameters(uint32_t packetLoss, int rtt) {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t WebrtcMediaCodecVP8VideoEncoder::SetRates(uint32_t newBitRate, uint32_t frameRate) {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+  if (!mMediaCodecEncoder) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  // XXX
+  // 1. Implement MediaCodec's setParameters() method.
+  // 2. Find a way to instantiate a Java Bundle to pass as the parameter to
+  //    MediaCodec's setParameters() method.
+  // mMediaCodecEncoder->setParameters
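+  //
+  // A sketch of the Java-side call this would wrap ("video-bitrate" is
+  // MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, available since API 19):
+  //
+  //   Bundle params = new Bundle();
+  //   params.putInt("video-bitrate", newBitRate * 1000); // kbps -> bps
+  //   mediaCodec.setParameters(params);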
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Decoder.
+WebrtcMediaCodecVP8VideoDecoder::WebrtcMediaCodecVP8VideoDecoder()
+  : mCallback(nullptr)
+  , mFrameWidth(0)
+  , mFrameHeight(0)
+  , mMediaCodecDecoder(nullptr) {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+}
+
+bool WebrtcMediaCodecVP8VideoDecoder::ResetInputBuffers() {
+  mInputBuffers = mMediaCodecDecoder->GetInputBuffers();
+
+  if (!mInputBuffers)
+    return false;
+
+  return true;
+}
+
+bool WebrtcMediaCodecVP8VideoDecoder::ResetOutputBuffers() {
+  mOutputBuffers = mMediaCodecDecoder->GetOutputBuffers();
+
+  if (!mOutputBuffers)
+    return false;
+
+  return true;
+}
+
+
+int32_t WebrtcMediaCodecVP8VideoDecoder::InitDecode(
+    const webrtc::VideoCodec* codecSettings,
+    int32_t numberOfCores) {
+
+  if (!mMediaCodecDecoder) {
+    mMediaCodecDecoder = new WebrtcAndroidMediaCodec();
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
+    const webrtc::EncodedImage& inputImage,
+    bool missingFrames,
+    const webrtc::RTPFragmentationHeader* fragmentation,
+    const webrtc::CodecSpecificInfo* codecSpecificInfo,
+    int64_t renderTimeMs) {
+
+  CSFLogDebug(logTag,  "%s, renderTimeMs = %lld ", __FUNCTION__, renderTimeMs);
+
+  if (inputImage._length == 0 || !inputImage._buffer) {
+    CSFLogDebug(logTag,  "%s, input Image invalid. length = %d", __FUNCTION__, inputImage._length);
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  if (inputImage._frameType == webrtc::kKeyFrame) {
+    CSFLogDebug(logTag,  "%s, inputImage is Golden frame",
+                  __FUNCTION__);
+    mFrameWidth = inputImage._encodedWidth;
+    mFrameHeight = inputImage._encodedHeight;
+  }
+
+  if (!mMediaCodecDecoder->isStarted) {
+    if (mFrameWidth == 0 || mFrameHeight == 0) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    mMediaCodecDecoder->SetDecoderCallback(mCallback);
+    nsresult res = mMediaCodecDecoder->Configure(mFrameWidth, mFrameHeight, nullptr, 0, MEDIACODEC_VIDEO_MIME_VP8, false /* decoder */);
+
+    if (res != NS_OK) {
+      CSFLogDebug(logTag,  "%s, decoder configure return err = %d",
+                  __FUNCTION__, res);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    res = mMediaCodecDecoder->Start();
+
+    if (NS_FAILED(res)) {
+      mMediaCodecDecoder->isStarted = false;
+      CSFLogDebug(logTag,  "%s start decoder. err = %d", __FUNCTION__, res);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    bool retBool = ResetInputBuffers();
+    if (!retBool) {
+      CSFLogDebug(logTag,  "%s ResetInputBuffers failed.", __FUNCTION__);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    retBool = ResetOutputBuffers();
+    if (!retBool) {
+      CSFLogDebug(logTag,  "%s ResetOutputBuffers failed.", __FUNCTION__);
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    mMediaCodecDecoder->isStarted = true;
+  }
+#ifdef WEBRTC_MEDIACODEC_DEBUG
+  uint32_t time = PR_IntervalNow();
+  CSFLogDebug(logTag,  "%s start decoder took %u ms", __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow()-time));
+#endif
+
+  bool feedFrame = true;
+  int32_t ret = WEBRTC_VIDEO_CODEC_ERROR;
+
+  // Retry until FeedMediaCodecInput() manages to dequeue an input buffer;
+  // it returns -1 when DequeueInputBuffer() times out.
+  while (feedFrame) {
+    ret = mMediaCodecDecoder->FeedMediaCodecInput(inputImage, renderTimeMs);
+    feedFrame = (ret == -1);
+  }
+
+  CSFLogDebug(logTag,  "%s end, ret = %d", __FUNCTION__, ret);
+
+  return ret;
+}
+
+void WebrtcMediaCodecVP8VideoDecoder::DecodeFrame(EncodedFrame* frame) {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+}
+
+int32_t WebrtcMediaCodecVP8VideoDecoder::RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback) {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+
+  mCallback = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t WebrtcMediaCodecVP8VideoDecoder::Release() {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+
+  delete mMediaCodecDecoder;
+  mMediaCodecDecoder = nullptr;
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebrtcMediaCodecVP8VideoDecoder::~WebrtcMediaCodecVP8VideoDecoder() {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+
+  Release();
+}
+
+int32_t WebrtcMediaCodecVP8VideoDecoder::Reset() {
+  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcMediaCodecVP8VideoCodec.h
@@ -0,0 +1,114 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WebrtcMediaCodecVP8VideoCodec_h__
+#define WebrtcMediaCodecVP8VideoCodec_h__
+
+#include "mozilla/Mutex.h"
+#include "nsThreadUtils.h"
+#include "nsAutoPtr.h"
+
+#include "MediaConduitInterface.h"
+#include "AudioConduit.h"
+#include "VideoConduit.h"
+
+namespace mozilla {
+
+struct EncodedFrame {
+  uint32_t width_;
+  uint32_t height_;
+  uint32_t timeStamp_;         // RTP timestamp of the frame.
+  uint64_t decode_timestamp_;  // Render time in milliseconds.
+};
+
+class WebrtcAndroidMediaCodec;
+
+class WebrtcMediaCodecVP8VideoEncoder : public WebrtcVideoEncoder {
+public:
+  WebrtcMediaCodecVP8VideoEncoder();
+
+  virtual ~WebrtcMediaCodecVP8VideoEncoder() override;
+
+  // Implement VideoEncoder interface.
+  virtual const uint64_t PluginID() override { return 0; }
+
+  virtual int32_t InitEncode(const webrtc::VideoCodec* codecSettings,
+                              int32_t numberOfCores,
+                              uint32_t maxPayloadSize) override;
+
+  virtual int32_t Encode(const webrtc::I420VideoFrame& inputImage,
+                          const webrtc::CodecSpecificInfo* codecSpecificInfo,
+                          const std::vector<webrtc::VideoFrameType>* frame_types) override;
+
+  virtual int32_t RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* callback) override;
+
+  virtual int32_t Release() override;
+
+  virtual int32_t SetChannelParameters(uint32_t packetLoss, int rtt) override;
+
+  virtual int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override;
+
+private:
+  int32_t VerifyAndAllocate(const uint32_t minimumSize);
+  bool ResetInputBuffers();
+  bool ResetOutputBuffers();
+
+  size_t mMaxPayloadSize;
+  uint32_t mTimestamp;
+  webrtc::EncodedImage mEncodedImage;
+  webrtc::EncodedImageCallback* mCallback;
+  uint32_t mFrameWidth;
+  uint32_t mFrameHeight;
+
+  WebrtcAndroidMediaCodec* mMediaCodecEncoder;
+
+  jobjectArray mInputBuffers;
+  jobjectArray mOutputBuffers;
+};
+
+class WebrtcMediaCodecVP8VideoDecoder : public WebrtcVideoDecoder {
+public:
+  WebrtcMediaCodecVP8VideoDecoder();
+
+  virtual ~WebrtcMediaCodecVP8VideoDecoder() override;
+
+  // Implement VideoDecoder interface.
+  virtual const uint64_t PluginID() override { return 0; }
+
+  virtual int32_t InitDecode(const webrtc::VideoCodec* codecSettings,
+                              int32_t numberOfCores) override;
+
+  virtual int32_t Decode(const webrtc::EncodedImage& inputImage,
+                          bool missingFrames,
+                          const webrtc::RTPFragmentationHeader* fragmentation,
+                          const webrtc::CodecSpecificInfo*
+                          codecSpecificInfo = NULL,
+                          int64_t renderTimeMs = -1) override;
+
+  virtual int32_t RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback) override;
+
+  virtual int32_t Release() override;
+
+  virtual int32_t Reset() override;
+
+private:
+  void DecodeFrame(EncodedFrame* frame);
+  void RunCallback();
+  bool ResetInputBuffers();
+  bool ResetOutputBuffers();
+
+  webrtc::DecodedImageCallback* mCallback;
+
+  uint32_t mFrameWidth;
+  uint32_t mFrameHeight;
+
+  WebrtcAndroidMediaCodec* mMediaCodecDecoder;
+  jobjectArray mInputBuffers;
+  jobjectArray mOutputBuffers;
+
+};
+
+}
+
+#endif // WebrtcMediaCodecVP8VideoCodec_h__
--- a/media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
+++ b/media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
@@ -1,13 +1,15 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "logging.h"
+#include "nsIGfxInfo.h"
+#include "nsServiceManagerUtils.h"
 
 #include "PeerConnectionImpl.h"
 #include "PeerConnectionMedia.h"
 #include "MediaPipelineFactory.h"
 #include "transportflow.h"
 #include "transportlayer.h"
 #include "transportlayerdtls.h"
 #include "transportlayerice.h"
@@ -25,16 +27,24 @@
 #endif
 
 #include "GmpVideoCodec.h"
 #ifdef MOZ_WEBRTC_OMX
 #include "OMXVideoCodec.h"
 #include "OMXCodecWrapper.h"
 #endif
 
+#ifdef MOZ_WEBRTC_MEDIACODEC
+#include "MediaCodecVideoCodec.h"
+#endif
+
+#ifdef MOZILLA_INTERNAL_API
+#include "mozilla/Preferences.h"
+#endif
+
 #include <stdlib.h>
 
 namespace mozilla {
 
 MOZ_MTLOG_MODULE("MediaPipelineFactory")
 
 static nsresult
 JsepCodecDescToCodecConfig(const JsepCodecDescription& aCodec,
@@ -841,17 +851,73 @@ MediaPipelineFactory::ConfigureVideoCode
 /*
  * Add external H.264 video codec.
  */
 MediaConduitErrorCode
 MediaPipelineFactory::EnsureExternalCodec(VideoSessionConduit& aConduit,
                                           VideoCodecConfig* aConfig,
                                           bool aIsSend)
 {
-  if (aConfig->mName == "VP8" || aConfig->mName == "VP9") {
+  if (aConfig->mName == "VP8") {
+#ifdef MOZ_WEBRTC_MEDIACODEC
+    if (aIsSend) {
+#ifdef MOZILLA_INTERNAL_API
+      bool enabled = mozilla::Preferences::GetBool("media.navigator.hardware.vp8_encode.acceleration_enabled", false);
+#else
+      bool enabled = false;
+#endif
+      if (enabled) {
+        nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
+        if (gfxInfo) {
+          int32_t status;
+          if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION, &status))) {
+            if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
+              NS_WARNING("VP8 encoder hardware is not whitelisted: disabling.\n");
+            } else {
+              VideoEncoder* encoder =
+                MediaCodecVideoCodec::CreateEncoder(MediaCodecVideoCodec::CodecType::CODEC_VP8);
+              if (encoder) {
+                return aConduit.SetExternalSendCodec(aConfig, encoder);
+              }
+              return kMediaConduitNoError;
+            }
+          }
+        }
+      }
+    } else {
+#ifdef MOZILLA_INTERNAL_API
+      bool enabled = mozilla::Preferences::GetBool("media.navigator.hardware.vp8_decode.acceleration_enabled", false);
+#else
+      bool enabled = false;
+#endif
+      if (enabled) {
+        nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
+        if (gfxInfo) {
+          int32_t status;
+          if (NS_SUCCEEDED(gfxInfo->GetFeatureStatus(nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION, &status))) {
+            if (status != nsIGfxInfo::FEATURE_STATUS_OK) {
+              NS_WARNING("VP8 decoder hardware is not whitelisted: disabling.\n");
+            } else {
+              VideoDecoder* decoder =
+                MediaCodecVideoCodec::CreateDecoder(MediaCodecVideoCodec::CodecType::CODEC_VP8);
+              if (decoder) {
+                return aConduit.SetExternalRecvCodec(aConfig, decoder);
+              }
+              return kMediaConduitNoError;
+            }
+          }
+        }
+      }
+    }
+#endif
+    return kMediaConduitNoError;
+  } else if (aConfig->mName == "VP9") {
     return kMediaConduitNoError;
   } else if (aConfig->mName == "H264") {
     if (aConduit.CodecPluginID() != 0) {
       return kMediaConduitNoError;
     }
     // Register H.264 codec.
     if (aIsSend) {
       VideoEncoder* encoder = nullptr;