Bug 970787 - Support yuv 4:4:4 4:2:2 video in VP8TrackEncoder by libyuv for color conversion. r=rillian
author: Benjamin Chen <bechen@mozilla.com>
Fri, 21 Feb 2014 16:35:13 +0800
changeset 190966 0c2dd5c132fb4223b61e00caf29d589a30a55d1d
parent 190965 2a16340cd6d5f203f2d9ad6cfbf0192cc368a4f8
child 190967 cb70becb6bf8b95b9711b916cef6a869fef985c7
push id: 474
push user: asasaki@mozilla.com
push date: Mon, 02 Jun 2014 21:01:02 +0000
treeherder: mozilla-release@967f4cf1b31c [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: rillian
bugs: 970787
milestone: 30.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 970787 - Support yuv 4:4:4 4:2:2 video in VP8TrackEncoder by libyuv for color conversion. r=rillian
content/media/encoder/VP8TrackEncoder.cpp
content/media/encoder/VP8TrackEncoder.h
content/media/encoder/moz.build
--- a/content/media/encoder/VP8TrackEncoder.cpp
+++ b/content/media/encoder/VP8TrackEncoder.cpp
@@ -4,31 +4,34 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "VP8TrackEncoder.h"
 #include "vpx/vp8cx.h"
 #include "vpx/vpx_encoder.h"
 #include "VideoUtils.h"
 #include "prsystem.h"
 #include "WebMWriter.h"
+#include "libyuv.h"
 
 namespace mozilla {
 
 #ifdef PR_LOGGING
 PRLogModuleInfo* gVP8TrackEncoderLog;
 #define VP8LOG(msg, ...) PR_LOG(gVP8TrackEncoderLog, PR_LOG_DEBUG, \
                                   (msg, ##__VA_ARGS__))
 // Debug logging macro with object pointer and class name.
 #else
 #define VP8LOG(msg, ...)
 #endif
 
 #define DEFAULT_BITRATE 2500 // in kbit/s
 #define DEFAULT_ENCODE_FRAMERATE 30
 
+using namespace mozilla::layers;
+
 VP8TrackEncoder::VP8TrackEncoder()
   : VideoTrackEncoder()
   , mEncodedFrameDuration(0)
   , mEncodedTimestamp(0)
   , mRemainingTicks(0)
   , mVPXContext(new vpx_codec_ctx_t())
   , mVPXImageWrapper(new vpx_image_t())
 {
@@ -221,60 +224,145 @@ VP8TrackEncoder::GetEncodedPartitions(En
 }
 
 void VP8TrackEncoder::PrepareMutedFrame()
 {
   if (mMuteFrame.IsEmpty()) {
     CreateMutedFrame(&mMuteFrame);
   }
 
-  uint32_t yPlanSize = mFrameWidth * mFrameHeight;
+  uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
   uint32_t halfWidth = (mFrameWidth + 1) / 2;
   uint32_t halfHeight = (mFrameHeight + 1) / 2;
-  uint32_t uvPlanSize = halfWidth * halfHeight;
+  uint32_t uvPlaneSize = halfWidth * halfHeight;
 
-  MOZ_ASSERT(mMuteFrame.Length() >= (yPlanSize + uvPlanSize));
+  MOZ_ASSERT(mMuteFrame.Length() >= (yPlaneSize + uvPlaneSize * 2));
   uint8_t *y = mMuteFrame.Elements();
-  uint8_t *cb = mMuteFrame.Elements() + yPlanSize;
-  uint8_t *cr = mMuteFrame.Elements() + yPlanSize + uvPlanSize;
+  uint8_t *cb = mMuteFrame.Elements() + yPlaneSize;
+  uint8_t *cr = mMuteFrame.Elements() + yPlaneSize + uvPlaneSize;
 
   mVPXImageWrapper->planes[PLANE_Y] = y;
   mVPXImageWrapper->planes[PLANE_U] = cb;
   mVPXImageWrapper->planes[PLANE_V] = cr;
   mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
   mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
   mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
 }
 
+static bool isYUV420(const PlanarYCbCrImage::Data *aData)
+{
+  if (aData->mYSize == aData->mCbCrSize * 2) {
+    return true;
+  }
+  return false;
+}
+
+static bool isYUV422(const PlanarYCbCrImage::Data *aData)
+{
+  if ((aData->mYSize.width == aData->mCbCrSize.width * 2) &&
+      (aData->mYSize.height == aData->mCbCrSize.height)) {
+    return true;
+  }
+  return false;
+}
+
+static bool isYUV444(const PlanarYCbCrImage::Data *aData)
+{
+  if (aData->mYSize == aData->mCbCrSize) {
+    return true;
+  }
+  return false;
+}
+
 nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
 {
   if (aChunk.mFrame.GetForceBlack() || aChunk.IsNull()) {
     PrepareMutedFrame();
   } else {
-    layers::Image* img = aChunk.mFrame.GetImage();
+    Image* img = aChunk.mFrame.GetImage();
     ImageFormat format = img->GetFormat();
     if (format != ImageFormat::PLANAR_YCBCR) {
       VP8LOG("Unsupported video format\n");
       return NS_ERROR_FAILURE;
     }
 
     // Cast away constness b/c some of the accessors are non-const
-    layers::PlanarYCbCrImage* yuv =
-    const_cast<layers::PlanarYCbCrImage *>(static_cast<const layers::PlanarYCbCrImage *>(img));
+    PlanarYCbCrImage* yuv =
+    const_cast<PlanarYCbCrImage *>(static_cast<const PlanarYCbCrImage *>(img));
     // Big-time assumption here that this is all contiguous data coming
     // from getUserMedia or other sources.
     MOZ_ASSERT(yuv);
-    const layers::PlanarYCbCrImage::Data *data = yuv->GetData();
+    const PlanarYCbCrImage::Data *data = yuv->GetData();
+
+    if (isYUV420(data) && !data->mCbSkip) { // 420 planar
+      mVPXImageWrapper->planes[PLANE_Y] = data->mYChannel;
+      mVPXImageWrapper->planes[PLANE_U] = data->mCbChannel;
+      mVPXImageWrapper->planes[PLANE_V] = data->mCrChannel;
+      mVPXImageWrapper->stride[VPX_PLANE_Y] = data->mYStride;
+      mVPXImageWrapper->stride[VPX_PLANE_U] = data->mCbCrStride;
+      mVPXImageWrapper->stride[VPX_PLANE_V] = data->mCbCrStride;
+    } else {
+      uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
+      uint32_t halfWidth = (mFrameWidth + 1) / 2;
+      uint32_t halfHeight = (mFrameHeight + 1) / 2;
+      uint32_t uvPlaneSize = halfWidth * halfHeight;
+      if (mI420Frame.IsEmpty()) {
+        mI420Frame.SetLength(yPlaneSize + uvPlaneSize * 2);
+      }
+
+      MOZ_ASSERT(mI420Frame.Length() >= (yPlaneSize + uvPlaneSize * 2));
+      uint8_t *y = mI420Frame.Elements();
+      uint8_t *cb = mI420Frame.Elements() + yPlaneSize;
+      uint8_t *cr = mI420Frame.Elements() + yPlaneSize + uvPlaneSize;
 
-    mVPXImageWrapper->planes[PLANE_Y] = data->mYChannel;
-    mVPXImageWrapper->planes[PLANE_U] = data->mCbChannel;
-    mVPXImageWrapper->planes[PLANE_V] = data->mCrChannel;
-    mVPXImageWrapper->stride[VPX_PLANE_Y] = data->mYStride;
-    mVPXImageWrapper->stride[VPX_PLANE_U] = data->mCbCrStride;
-    mVPXImageWrapper->stride[VPX_PLANE_V] = data->mCbCrStride;
+      if (isYUV420(data) && data->mCbSkip) {
+        // If mCbSkip is set, we assume it's nv12 or nv21.
+        if (data->mCbChannel < data->mCrChannel) { // nv12
+          libyuv::NV12ToI420(data->mYChannel, data->mYStride,
+                             data->mCbChannel, data->mCbCrStride,
+                             y, mFrameWidth,
+                             cb, halfWidth,
+                             cr, halfWidth,
+                             mFrameWidth, mFrameHeight);
+        } else { // nv21
+          libyuv::NV21ToI420(data->mYChannel, data->mYStride,
+                             data->mCrChannel, data->mCbCrStride,
+                             y, mFrameWidth,
+                             cb, halfWidth,
+                             cr, halfWidth,
+                             mFrameWidth, mFrameHeight);
+        }
+      } else if (isYUV444(data) && !data->mCbSkip) {
+        libyuv::I444ToI420(data->mYChannel, data->mYStride,
+                           data->mCbChannel, data->mCbCrStride,
+                           data->mCrChannel, data->mCbCrStride,
+                           y, mFrameWidth,
+                           cb, halfWidth,
+                           cr, halfWidth,
+                           mFrameWidth, mFrameHeight);
+      } else if (isYUV422(data) && !data->mCbSkip) {
+        libyuv::I422ToI420(data->mYChannel, data->mYStride,
+                           data->mCbChannel, data->mCbCrStride,
+                           data->mCrChannel, data->mCbCrStride,
+                           y, mFrameWidth,
+                           cb, halfWidth,
+                           cr, halfWidth,
+                           mFrameWidth, mFrameHeight);
+      } else {
+        VP8LOG("Unsupported planar format\n");
+        return NS_ERROR_NOT_IMPLEMENTED;
+      }
+
+      mVPXImageWrapper->planes[PLANE_Y] = y;
+      mVPXImageWrapper->planes[PLANE_U] = cb;
+      mVPXImageWrapper->planes[PLANE_V] = cr;
+      mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
+      mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
+      mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
+    }
   }
   return NS_OK;
 }
 
 // These two define value used in GetNextEncodeOperation to determine the
 // EncodeOperation for next target frame.
 #define I_FRAME_RATIO (0.5)
 #define SKIP_FRAME_RATIO (0.75)
--- a/content/media/encoder/VP8TrackEncoder.h
+++ b/content/media/encoder/VP8TrackEncoder.h
@@ -69,16 +69,19 @@ private:
   // Encoded timestamp.
   TrackTicks mEncodedTimestamp;
   // Duration to the next encode frame.
   TrackTicks mRemainingTicks;
 
   // Muted frame, we only create it once.
   nsTArray<uint8_t> mMuteFrame;
 
+  // Scratch buffer for converting 4:4:4 / 4:2:2 (and NV12/NV21) frames to I420.
+  nsTArray<uint8_t> mI420Frame;
+
   /**
    * A local segment queue which takes the raw data out from mRawSegment in the
    * call of GetEncodedTrack(). Since we implement the fixed FPS encoding
    * policy, it needs to be global in order to store the leftover segments
    * taken from mRawSegment.
    */
   VideoSegment mSourceSegment;
 
--- a/content/media/encoder/moz.build
+++ b/content/media/encoder/moz.build
@@ -30,14 +30,15 @@ if CONFIG['MOZ_OPUS']:
 
 if CONFIG['MOZ_WEBM_ENCODER']:
     EXPORTS += ['VorbisTrackEncoder.h',
                 'VP8TrackEncoder.h',
     ]
     UNIFIED_SOURCES += ['VorbisTrackEncoder.cpp',
                         'VP8TrackEncoder.cpp',
     ]
+    LOCAL_INCLUDES += ['/media/libyuv/include']
 
 FAIL_ON_WARNINGS = True
 
 FINAL_LIBRARY = 'gklayout'
 
 include('/ipc/chromium/chromium-config.mozbuild')