Bug 1043558 - Use gralloc for WebRTC camera preview r=jesup,nical
authorSotaro Ikeda <sikeda@mozilla.com>
Tue, 16 Dec 2014 07:11:48 -0800
changeset 219924 473ecad73b44b88f88c858727c9431f351d76820
parent 219923 47fdf63700085a4fb75f47ca78e87e7a89629521
child 219960 8401afdb6e6ce386dabe1138af96f38d24de48b3
child 219961 938abddb1a2d34e139afbaeaf0296a00a34b8255
push id27972
push userryanvm@gmail.com
push dateTue, 16 Dec 2014 20:14:14 +0000
treeherdermozilla-central@473ecad73b44 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersjesup, nical
bugs1043558
milestone37.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1043558 - Use gralloc for WebRTC camera preview r=jesup,nical
dom/media/webrtc/MediaEngineGonkVideoSource.cpp
dom/media/webrtc/MediaEngineGonkVideoSource.h
gfx/layers/client/TextureClient.cpp
gfx/layers/client/TextureClient.h
gfx/layers/client/TextureClientRecycleAllocator.cpp
gfx/layers/client/TextureClientRecycleAllocator.h
gfx/layers/opengl/GrallocTextureClient.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/trunk/webrtc/common_video/libyuv/webrtc_libyuv.cc
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
@@ -3,16 +3,18 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "MediaEngineGonkVideoSource.h"
 
 #define LOG_TAG "MediaEngineGonkVideoSource"
 
 #include <utils/Log.h>
 
 #include "GrallocImages.h"
+#include "mozilla/layers/GrallocTextureClient.h"
+#include "mozilla/layers/ImageBridgeChild.h"
 #include "VideoUtils.h"
 #include "ScreenOrientation.h"
 
 #include "libyuv.h"
 #include "mtransport/runnable_utils.h"
 
 namespace mozilla {
 
@@ -23,16 +25,18 @@ using namespace mozilla::gfx;
 extern PRLogModuleInfo* GetMediaManagerLog();
 #define LOG(msg) PR_LOG(GetMediaManagerLog(), PR_LOG_DEBUG, msg)
 #define LOGFRAME(msg) PR_LOG(GetMediaManagerLog(), 6, msg)
 #else
 #define LOG(msg)
 #define LOGFRAME(msg)
 #endif
 
+#define WEBRTC_GONK_VIDEO_SOURCE_POOL_BUFFERS 10
+
 // We are subclassed from CameraControlListener, which implements a
 // threadsafe reference-count for us.
 NS_IMPL_QUERY_INTERFACE(MediaEngineGonkVideoSource, nsISupports)
 NS_IMPL_ADDREF_INHERITED(MediaEngineGonkVideoSource, CameraControlListener)
 NS_IMPL_RELEASE_INHERITED(MediaEngineGonkVideoSource, CameraControlListener)
 
 // Called if the graph thinks it's running out of buffered video; repeat
 // the last frame for whatever minimum period it think it needs. Note that
@@ -249,25 +253,29 @@ MediaEngineGonkVideoSource::AllocImpl() 
 
   mCameraControl = ICameraControl::Create(mCaptureIndex);
   if (mCameraControl) {
     mState = kAllocated;
     // Add this as a listener for CameraControl events. We don't need
     // to explicitly remove this--destroying the CameraControl object
     // in DeallocImpl() will do that for us.
     mCameraControl->AddListener(this);
+    mTextureClientAllocator =
+      new layers::TextureClientRecycleAllocator(layers::ImageBridgeChild::GetSingleton());
+    mTextureClientAllocator->SetMaxPoolSize(WEBRTC_GONK_VIDEO_SOURCE_POOL_BUFFERS);
   }
   mCallbackMonitor.Notify();
 }
 
 void
 MediaEngineGonkVideoSource::DeallocImpl() {
   MOZ_ASSERT(NS_IsMainThread());
 
   mCameraControl = nullptr;
+  mTextureClientAllocator = nullptr;
 }
 
 // The same algorithm from bug 840244
 static int
 GetRotateAmount(ScreenOrientation aScreen, int aCameraMountAngle, bool aBackCamera) {
   int screenAngle = 0;
   switch (aScreen) {
     case eScreenOrientation_PortraitPrimary:
@@ -573,66 +581,81 @@ MediaEngineGonkVideoSource::ConvertPixel
   }
 }
 
 void
 MediaEngineGonkVideoSource::RotateImage(layers::Image* aImage, uint32_t aWidth, uint32_t aHeight) {
   layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(aImage);
   android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
   void *pMem = nullptr;
+  // Bug 1109957: size will be wrong if width or height is odd
   uint32_t size = aWidth * aHeight * 3 / 2;
+  MOZ_ASSERT(!(aWidth & 1) && !(aHeight & 1));
 
   graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
 
   uint8_t* srcPtr = static_cast<uint8_t*>(pMem);
   // Create a video frame and append it to the track.
-  nsRefPtr<layers::Image> image = mImageContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
-  layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
+  ImageFormat format = ImageFormat::GRALLOC_PLANAR_YCBCR;
+  nsRefPtr<layers::Image> image = mImageContainer->CreateImage(format);
 
   uint32_t dstWidth;
   uint32_t dstHeight;
 
   if (mRotation == 90 || mRotation == 270) {
     dstWidth = aHeight;
     dstHeight = aWidth;
   } else {
     dstWidth = aWidth;
     dstHeight = aHeight;
   }
 
   uint32_t half_width = dstWidth / 2;
-  uint8_t* dstPtr = videoImage->AllocateAndGetNewBuffer(size);
+
+  layers::GrallocImage* videoImage = static_cast<layers::GrallocImage*>(image.get());
+  MOZ_ASSERT(mTextureClientAllocator);
+  RefPtr<layers::TextureClient> textureClient
+    = mTextureClientAllocator->CreateOrRecycleForDrawing(gfx::SurfaceFormat::YUV,
+                                                         gfx::IntSize(dstWidth, dstHeight),
+                                                         gfx::BackendType::NONE,
+                                                         layers::TextureFlags::DEFAULT,
+                                                         layers::ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
+  if (!textureClient) {
+    return;
+  }
+  RefPtr<layers::GrallocTextureClientOGL> grallocTextureClient =
+    static_cast<layers::GrallocTextureClientOGL*>(textureClient.get());
+
+  android::sp<android::GraphicBuffer> destBuffer = grallocTextureClient->GetGraphicBuffer();
+
+  void* destMem = nullptr;
+  destBuffer->lock(android::GraphicBuffer::USAGE_SW_WRITE_OFTEN, &destMem);
+  uint8_t* dstPtr = static_cast<uint8_t*>(destMem);
+
+  int32_t yStride = destBuffer->getStride();
+  // Align to a 16-byte boundary
+  int32_t uvStride = ((yStride / 2) + 15) & ~0x0F;
+
   libyuv::ConvertToI420(srcPtr, size,
-                        dstPtr, dstWidth,
-                        dstPtr + (dstWidth * dstHeight), half_width,
-                        dstPtr + (dstWidth * dstHeight * 5 / 4), half_width,
+                        dstPtr, yStride,
+                        dstPtr + (yStride * dstHeight + (uvStride * dstHeight / 2)), uvStride,
+                        dstPtr + (yStride * dstHeight), uvStride,
                         0, 0,
                         aWidth, aHeight,
                         aWidth, aHeight,
                         static_cast<libyuv::RotationMode>(mRotation),
-                        ConvertPixelFormatToFOURCC(graphicBuffer->getPixelFormat()));
+                        libyuv::FOURCC_NV21);
+  destBuffer->unlock();
   graphicBuffer->unlock();
 
-  const uint8_t lumaBpp = 8;
-  const uint8_t chromaBpp = 4;
+  layers::GrallocImage::GrallocData data;
 
-  layers::PlanarYCbCrData data;
-  data.mYChannel = dstPtr;
-  data.mYSize = IntSize(dstWidth, dstHeight);
-  data.mYStride = dstWidth * lumaBpp / 8;
-  data.mCbCrStride = dstWidth * chromaBpp / 8;
-  data.mCbChannel = dstPtr + dstHeight * data.mYStride;
-  data.mCrChannel = data.mCbChannel +( dstHeight * data.mCbCrStride / 2);
-  data.mCbCrSize = IntSize(dstWidth / 2, dstHeight / 2);
-  data.mPicX = 0;
-  data.mPicY = 0;
-  data.mPicSize = IntSize(dstWidth, dstHeight);
-  data.mStereoMode = StereoMode::MONO;
-
-  videoImage->SetDataNoCopy(data);
+  data.mPicSize = gfx::IntSize(dstWidth, dstHeight);
+  data.mGraphicBuffer = textureClient;
+  videoImage->SetData(data);
 
   // implicitly releases last image
   mImage = image.forget();
 
   // Push the frame into the MSG with a minimal duration.  This will likely
   // mean we'll still get NotifyPull calls which will then return the same
   // frame again with a longer duration.  However, this means we won't
   // fail to get the frame in and drop frames.
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.h
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.h
@@ -10,16 +10,17 @@
 #endif
 
 #include "CameraControlListener.h"
 #include "MediaEngineCameraVideoSource.h"
 
 #include "mozilla/Hal.h"
 #include "mozilla/ReentrantMonitor.h"
 #include "mozilla/dom/File.h"
+#include "mozilla/layers/TextureClientRecycleAllocator.h"
 
 namespace mozilla {
 
 /**
  * The B2G implementation of the MediaEngine interface.
  *
  * On B2G platform, member data may accessed from different thread after construction:
  *
@@ -107,13 +108,15 @@ protected:
   nsCOMPtr<nsIDOMFile> mLastCapture;
 
   // These are protected by mMonitor in parent class
   nsTArray<nsRefPtr<PhotoCallback>> mPhotoCallbacks;
   int mRotation;
   int mCameraAngle; // See dom/base/ScreenOrientation.h
   bool mBackCamera;
   bool mOrientationChanged; // True when screen rotates.
+
+  RefPtr<layers::TextureClientRecycleAllocator> mTextureClientAllocator;
 };
 
 } // namespace mozilla
 
 #endif // MediaEngineGonkVideoSource_h_
--- a/gfx/layers/client/TextureClient.cpp
+++ b/gfx/layers/client/TextureClient.cpp
@@ -381,16 +381,20 @@ TextureClient::CreateForDrawing(ISurface
 #endif
 
   MOZ_ASSERT(!texture || texture->CanExposeDrawTarget(), "texture cannot expose a DrawTarget?");
 
   if (texture && texture->AllocateForSurface(aSize, aAllocFlags)) {
     return texture;
   }
 
+  if (aAllocFlags & ALLOC_DISALLOW_BUFFERTEXTURECLIENT) {
+    return nullptr;
+  }
+
   if (texture) {
     NS_WARNING("Failed to allocate a TextureClient, falling back to BufferTextureClient.");
   }
 
   // Can't do any better than a buffer texture client.
   texture = CreateBufferTextureClient(aAllocator, aFormat, aTextureFlags, aMoz2DBackend);
 
   if (!texture->AllocateForSurface(aSize, aAllocFlags)) {
--- a/gfx/layers/client/TextureClient.h
+++ b/gfx/layers/client/TextureClient.h
@@ -66,17 +66,18 @@ class KeepAlive;
 /**
  * TextureClient is the abstraction that allows us to share data between the
  * content and the compositor side.
  */
 
 enum TextureAllocationFlags {
   ALLOC_DEFAULT = 0,
   ALLOC_CLEAR_BUFFER = 1,
-  ALLOC_CLEAR_BUFFER_WHITE = 2
+  ALLOC_CLEAR_BUFFER_WHITE = 2,
+  ALLOC_DISALLOW_BUFFERTEXTURECLIENT = 4
 };
 
 #ifdef XP_WIN
 typedef void* SyncHandle;
 #else
 typedef uintptr_t SyncHandle;
 #endif // XP_WIN
 
--- a/gfx/layers/client/TextureClientRecycleAllocator.cpp
+++ b/gfx/layers/client/TextureClientRecycleAllocator.cpp
@@ -18,16 +18,23 @@ namespace layers {
 
 class TextureClientRecycleAllocatorImp : public ISurfaceAllocator
 {
   ~TextureClientRecycleAllocatorImp();
 
 public:
   explicit TextureClientRecycleAllocatorImp(ISurfaceAllocator* aAllocator);
 
+  void SetMaxPoolSize(uint32_t aMax)
+  {
+    if (aMax > 0) {
+      mMaxPooledSize = aMax;
+    }
+  }
+
   // Creates and allocates a TextureClient.
   TemporaryRef<TextureClient>
   CreateOrRecycleForDrawing(gfx::SurfaceFormat aFormat,
                             gfx::IntSize aSize,
                             gfx::BackendType aMoz2dBackend,
                             TextureFlags aTextureFlags,
                             TextureAllocationFlags flags);
 
@@ -120,28 +127,28 @@ TextureClientRecycleAllocatorImp::Textur
 
 TextureClientRecycleAllocatorImp::~TextureClientRecycleAllocatorImp()
 {
   MOZ_ASSERT(mDestroyed);
   MOZ_ASSERT(mPooledClients.empty());
   MOZ_ASSERT(mInUseClients.empty());
 }
 
-
 TemporaryRef<TextureClient>
 TextureClientRecycleAllocatorImp::CreateOrRecycleForDrawing(
                                              gfx::SurfaceFormat aFormat,
                                              gfx::IntSize aSize,
                                              gfx::BackendType aMoz2DBackend,
                                              TextureFlags aTextureFlags,
                                              TextureAllocationFlags aAllocFlags)
 {
   // TextureAllocationFlags is actually used only by ContentClient.
   // This class does not handle ConteClient's TextureClient allocation.
-  MOZ_ASSERT(aAllocFlags == TextureAllocationFlags::ALLOC_DEFAULT);
+  MOZ_ASSERT(aAllocFlags == TextureAllocationFlags::ALLOC_DEFAULT ||
+             aAllocFlags == TextureAllocationFlags::ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
   MOZ_ASSERT(!(aTextureFlags & TextureFlags::RECYCLE));
   aTextureFlags = aTextureFlags | TextureFlags::RECYCLE; // Set recycle flag
 
   RefPtr<TextureClientHolder> textureHolder;
 
   if (aMoz2DBackend == gfx::BackendType::NONE) {
     aMoz2DBackend = gfxPlatform::GetPlatform()->GetContentBackend();
   }
@@ -232,16 +239,21 @@ TextureClientRecycleAllocator::TextureCl
 }
 
 TextureClientRecycleAllocator::~TextureClientRecycleAllocator()
 {
   mAllocator->Destroy();
   mAllocator = nullptr;
 }
 
+void
+TextureClientRecycleAllocator::SetMaxPoolSize(uint32_t aMax)
+{
+  mAllocator->SetMaxPoolSize(aMax);
+}
 
 TemporaryRef<TextureClient>
 TextureClientRecycleAllocator::CreateOrRecycleForDrawing(
                                             gfx::SurfaceFormat aFormat,
                                             gfx::IntSize aSize,
                                             gfx::BackendType aMoz2DBackend,
                                             TextureFlags aTextureFlags,
                                             TextureAllocationFlags aAllocFlags)
--- a/gfx/layers/client/TextureClientRecycleAllocator.h
+++ b/gfx/layers/client/TextureClientRecycleAllocator.h
@@ -27,16 +27,18 @@ class TextureClientRecycleAllocator
 {
   ~TextureClientRecycleAllocator();
 
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TextureClientRecycleAllocator)
 
   explicit TextureClientRecycleAllocator(ISurfaceAllocator* aAllocator);
 
+  void SetMaxPoolSize(uint32_t aMax);
+
   // Creates and allocates a TextureClient.
   TemporaryRef<TextureClient>
   CreateOrRecycleForDrawing(gfx::SurfaceFormat aFormat,
                             gfx::IntSize aSize,
                             gfx::BackendType aMoz2dBackend,
                             TextureFlags aTextureFlags,
                             TextureAllocationFlags flags = ALLOC_DEFAULT);
 
--- a/gfx/layers/opengl/GrallocTextureClient.cpp
+++ b/gfx/layers/opengl/GrallocTextureClient.cpp
@@ -166,16 +166,18 @@ SurfaceFormatForPixelFormat(android::Pix
   case PIXEL_FORMAT_RGBA_8888:
     return gfx::SurfaceFormat::R8G8B8A8;
   case PIXEL_FORMAT_BGRA_8888:
     return gfx::SurfaceFormat::B8G8R8A8;
   case PIXEL_FORMAT_RGBX_8888:
     return gfx::SurfaceFormat::R8G8B8X8;
   case PIXEL_FORMAT_RGB_565:
     return gfx::SurfaceFormat::R5G6B5;
+  case HAL_PIXEL_FORMAT_YV12:
+    return gfx::SurfaceFormat::YUV;
   default:
     MOZ_CRASH("Unknown gralloc pixel format");
   }
   return gfx::SurfaceFormat::R8G8B8A8;
 }
 
 gfx::DrawTarget*
 GrallocTextureClientOGL::BorrowDrawTarget()
@@ -225,16 +227,19 @@ GrallocTextureClientOGL::AllocateForSurf
     break;
   case gfx::SurfaceFormat::B8G8R8X8:
      format = android::PIXEL_FORMAT_RGBX_8888;
      mFlags |= TextureFlags::RB_SWAPPED;
     break;
   case gfx::SurfaceFormat::R5G6B5:
     format = android::PIXEL_FORMAT_RGB_565;
     break;
+  case gfx::SurfaceFormat::YUV:
+    format = HAL_PIXEL_FORMAT_YV12;
+    break;
   case gfx::SurfaceFormat::A8:
     NS_WARNING("gralloc does not support gfx::SurfaceFormat::A8");
     return false;
   default:
     NS_WARNING("Unsupported surface format");
     return false;
   }
 
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -1055,29 +1055,25 @@ WebrtcVideoConduit::SendVideoFrame(unsig
   if(!video_frame || video_frame_length == 0 ||
      width == 0 || height == 0)
   {
     CSFLogError(logTag,  "%s Invalid Parameters ",__FUNCTION__);
     MOZ_ASSERT(PR_FALSE);
     return kMediaConduitMalformedArgument;
   }
 
-  webrtc::RawVideoType type;
-  switch (video_type) {
-    case kVideoI420:
-      type = webrtc::kVideoI420;
-      break;
-    case kVideoNV21:
-      type = webrtc::kVideoNV21;
-      break;
-    default:
-      CSFLogError(logTag,  "%s VideoType Invalid. Only 1420 and NV21 Supported",__FUNCTION__);
-      MOZ_ASSERT(PR_FALSE);
-      return kMediaConduitMalformedArgument;
+  // NOTE: update when common_types.h changes
+  if (video_type > kVideoBGRA) {
+    CSFLogError(logTag,  "%s VideoType %d Invalid", __FUNCTION__, video_type);
+    MOZ_ASSERT(PR_FALSE);
+    return kMediaConduitMalformedArgument;
   }
+  // RawVideoType == VideoType
+  webrtc::RawVideoType type = static_cast<webrtc::RawVideoType>((int)video_type);
+
   //Transmission should be enabled before we insert any frames.
   if(!mEngineTransmitting)
   {
     CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
   if (!SelectSendResolution(width, height))
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1170,25 +1170,46 @@ void MediaPipelineTransmit::PipelineList
   }
   last_img_ = serial;
 
   ImageFormat format = img->GetFormat();
 #ifdef WEBRTC_GONK
   if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
     layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(img);
     android::sp<android::GraphicBuffer> graphicBuffer = nativeImage->GetGraphicBuffer();
+    int pixelFormat = graphicBuffer->getPixelFormat(); /* PixelFormat is an enum == int */
+    mozilla::VideoType destFormat;
+    switch (pixelFormat) {
+      case HAL_PIXEL_FORMAT_YV12:
+        // all Android implementations must support this format
+        destFormat = mozilla::kVideoYV12;
+        break;
+      case layers::GrallocImage::HAL_PIXEL_FORMAT_YCbCr_420_SP:
+        destFormat = mozilla::kVideoNV21;
+        break;
+      case layers::GrallocImage::HAL_PIXEL_FORMAT_YCbCr_420_P:
+        destFormat = mozilla::kVideoI420;
+        break;
+      default:
+        // XXX Bug NNNNNNN
+        // use http://mxr.mozilla.org/mozilla-central/source/content/media/omx/I420ColorConverterHelper.cpp
+        // to convert unknown types (OEM-specific) to I420
+        MOZ_MTLOG(ML_ERROR, "Un-handled GRALLOC buffer type:" << pixelFormat);
+        MOZ_CRASH();
+    }
     void *basePtr;
     graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &basePtr);
     uint32_t width = graphicBuffer->getWidth();
     uint32_t height = graphicBuffer->getHeight();
+    // XXX A gralloc buffer's width and stride may differ depending on the implementation.
     conduit->SendVideoFrame(static_cast<unsigned char*>(basePtr),
                             I420SIZE(width, height),
                             width,
                             height,
-                            mozilla::kVideoNV21, 0);
+                            destFormat, 0);
     graphicBuffer->unlock();
   } else
 #endif
   if (format == ImageFormat::PLANAR_YCBCR) {
     // Cast away constness b/c some of the accessors are non-const
     layers::PlanarYCbCrImage* yuv =
     const_cast<layers::PlanarYCbCrImage *>(
           static_cast<const layers::PlanarYCbCrImage *>(img));
--- a/media/webrtc/trunk/webrtc/common_video/libyuv/webrtc_libyuv.cc
+++ b/media/webrtc/trunk/webrtc/common_video/libyuv/webrtc_libyuv.cc
@@ -236,16 +236,38 @@ int ConvertToI420(VideoType src_video_ty
   int dst_width = dst_frame->width();
   int dst_height = dst_frame->height();
   // LibYuv expects pre-rotation values for dst.
   // Stride values should correspond to the destination values.
   if (rotation == kRotate90 || rotation == kRotate270) {
     dst_width = dst_frame->height();
     dst_height =dst_frame->width();
   }
+#ifdef WEBRTC_GONK
+  if (src_video_type == kYV12) {
+    // In a gralloc buffer, the YV12 format's Cb and Cr strides are aligned
+    // to a 16-byte boundary. See /system/core/include/system/graphics.h
+    int stride_y = src_width;
+    int stride_uv = (((stride_y + 1) / 2) + 15) & ~0x0F;
+    return libyuv::I420Rotate(src_frame,
+                              stride_y,
+                              src_frame + (stride_y * src_height) + (stride_uv * ((src_height + 1) / 2)),
+                              stride_uv,
+                              src_frame + (stride_y * src_height),
+                              stride_uv,
+                              dst_frame->buffer(kYPlane),
+                              dst_frame->stride(kYPlane),
+                              dst_frame->buffer(kUPlane),
+                              dst_frame->stride(kUPlane),
+                              dst_frame->buffer(kVPlane),
+                              dst_frame->stride(kVPlane),
+                              src_width, src_height,
+                              ConvertRotationMode(rotation));
+  }
+#endif
   return libyuv::ConvertToI420(src_frame, sample_size,
                                dst_frame->buffer(kYPlane),
                                dst_frame->stride(kYPlane),
                                dst_frame->buffer(kUPlane),
                                dst_frame->stride(kUPlane),
                                dst_frame->buffer(kVPlane),
                                dst_frame->stride(kVPlane),
                                crop_x, crop_y,