Bug 1215089: P5. Convert 10/12 bits YUV image to 8 bits. r?mattwoodrow draft
authorJean-Yves Avenard <jyavenard@mozilla.com>
Tue, 11 Jul 2017 13:55:47 +0200
changeset 607753 3302a7dee6fa973ca4e565cc49046d9a748ac309
parent 607752 0f48af211acbdc5d8c09c6ff384cebd72ac79bd5
child 637137 390955c556768d0727fb878f45443ed25cdb6f2b
push id68099
push userbmo:jyavenard@mozilla.com
push dateWed, 12 Jul 2017 20:25:16 +0000
reviewersmattwoodrow
bugs1215089, 1379948
milestone56.0a1
Bug 1215089: P5. Convert 10/12 bits YUV image to 8 bits. r?mattwoodrow For now, convert 10/12 bits YUV image to 8 bits. Native support will be tracked in bug 1379948 MozReview-Commit-ID: 2xTjNb6mgPE
gfx/ycbcr/YCbCrUtils.cpp
--- a/gfx/ycbcr/YCbCrUtils.cpp
+++ b/gfx/ycbcr/YCbCrUtils.cpp
@@ -61,100 +61,183 @@ GetYCbCrToRGBDestFormatAndSize(const lay
     if (aData.mPicX != 0 || aData.mPicY != 0 || yuvtype == YV24)
       prescale = false;
   }
   if (!prescale) {
     aSuggestedSize = aData.mPicSize;
   }
 }
 
+static inline void
+ConvertYCbCr16to8Line(uint8_t* aDst,
+                      int aStride,
+                      const uint16_t* aSrc,
+                      int aStride16,
+                      int aWidth,
+                      int aHeight,
+                      int aDepth)
+{
+  uint16_t mask = (1 << aDepth) - 1;
+
+  for (int i = 0; i < aHeight; i++) {
+    for (int j = 0; j < aWidth; j++) {
+      uint16_t val = (aSrc[j] & mask) >> (aDepth - 8);
+      aDst[j] = val;
+    }
+    aDst += aStride;
+    aSrc += aStride16;
+  }
+}
+
 void
 ConvertYCbCrToRGB(const layers::PlanarYCbCrData& aData,
                   const SurfaceFormat& aDestFormat,
                   const IntSize& aDestSize,
                   unsigned char* aDestBuffer,
                   int32_t aStride)
 {
   // ConvertYCbCrToRGB et al. assume the chroma planes are rounded up if the
   // luma plane is odd sized.
   MOZ_ASSERT((aData.mCbCrSize.width == aData.mYSize.width ||
               aData.mCbCrSize.width == (aData.mYSize.width + 1) >> 1) &&
              (aData.mCbCrSize.height == aData.mYSize.height ||
               aData.mCbCrSize.height == (aData.mYSize.height + 1) >> 1));
+
+  // Used if converting to 8 bits YUV.
+  UniquePtr<uint8_t[]> yChannel;
+  UniquePtr<uint8_t[]> cbChannel;
+  UniquePtr<uint8_t[]> crChannel;
+  layers::PlanarYCbCrData dstData;
+
+  if (aData.mDepth != 8) {
+    // Convert to 8 bits data first.
+    dstData.mPicSize = aData.mPicSize;
+    dstData.mPicX = aData.mPicX;
+    dstData.mPicY = aData.mPicY;
+    dstData.mYSize = aData.mYSize;
+    // We align the destination stride to 32 bytes, so that libyuv can use
+    // SSE optimised code.
+    dstData.mYStride = (aData.mYSize.width + 31) & ~31u;
+    dstData.mCbCrSize = aData.mCbCrSize;
+    dstData.mCbCrStride = (aData.mCbCrSize.width + 31) & ~31u;
+    dstData.mYUVColorSpace = aData.mYUVColorSpace;
+    dstData.mDepth = 8;
+
+    size_t ySize = GetAlignedStride<1>(dstData.mYStride, aData.mYSize.height);
+    size_t cbcrSize =
+      GetAlignedStride<1>(dstData.mCbCrStride, aData.mCbCrSize.height);
+    if (ySize == 0 || cbcrSize == 0) {
+      return;
+    }
+    yChannel = MakeUnique<uint8_t[]>(ySize);
+    cbChannel = MakeUnique<uint8_t[]>(cbcrSize);
+    crChannel = MakeUnique<uint8_t[]>(cbcrSize);
+
+    dstData.mYChannel = yChannel.get();
+    dstData.mCbChannel = cbChannel.get();
+    dstData.mCrChannel = crChannel.get();
+
+    ConvertYCbCr16to8Line(dstData.mYChannel,
+                          dstData.mYStride,
+                          reinterpret_cast<uint16_t*>(aData.mYChannel),
+                          aData.mYStride / 2,
+                          aData.mYSize.width,
+                          aData.mYSize.height,
+                          aData.mDepth);
+
+    ConvertYCbCr16to8Line(dstData.mCbChannel,
+                          dstData.mCbCrStride,
+                          reinterpret_cast<uint16_t*>(aData.mCbChannel),
+                          aData.mCbCrStride / 2,
+                          aData.mCbCrSize.width,
+                          aData.mCbCrSize.height,
+                          aData.mDepth);
+
+    ConvertYCbCr16to8Line(dstData.mCrChannel,
+                          dstData.mCbCrStride,
+                          reinterpret_cast<uint16_t*>(aData.mCrChannel),
+                          aData.mCbCrStride / 2,
+                          aData.mCbCrSize.width,
+                          aData.mCbCrSize.height,
+                          aData.mDepth);
+  }
+
+  const layers::PlanarYCbCrData& srcData = aData.mDepth == 8 ? aData : dstData;
+
   YUVType yuvtype =
-    TypeFromSize(aData.mYSize.width,
-                 aData.mYSize.height,
-                 aData.mCbCrSize.width,
-                 aData.mCbCrSize.height);
+    TypeFromSize(srcData.mYSize.width,
+                 srcData.mYSize.height,
+                 srcData.mCbCrSize.width,
+                 srcData.mCbCrSize.height);
 
   // Convert from YCbCr to RGB now, scaling the image if needed.
-  if (aDestSize != aData.mPicSize) {
+  if (aDestSize != srcData.mPicSize) {
 #if defined(HAVE_YCBCR_TO_RGB565)
     if (aDestFormat == SurfaceFormat::R5G6B5_UINT16) {
-      ScaleYCbCrToRGB565(aData.mYChannel,
-                         aData.mCbChannel,
-                         aData.mCrChannel,
+      ScaleYCbCrToRGB565(srcData.mYChannel,
+                         srcData.mCbChannel,
+                         srcData.mCrChannel,
                          aDestBuffer,
-                         aData.mPicX,
-                         aData.mPicY,
-                         aData.mPicSize.width,
-                         aData.mPicSize.height,
+                         srcData.mPicX,
+                         srcData.mPicY,
+                         srcData.mPicSize.width,
+                         srcData.mPicSize.height,
                          aDestSize.width,
                          aDestSize.height,
-                         aData.mYStride,
-                         aData.mCbCrStride,
+                         srcData.mYStride,
+                         srcData.mCbCrStride,
                          aStride,
                          yuvtype,
                          FILTER_BILINEAR);
     } else
 #endif
-      ScaleYCbCrToRGB32(aData.mYChannel, //
-                        aData.mCbChannel,
-                        aData.mCrChannel,
+      ScaleYCbCrToRGB32(srcData.mYChannel, //
+                        srcData.mCbChannel,
+                        srcData.mCrChannel,
                         aDestBuffer,
-                        aData.mPicSize.width,
-                        aData.mPicSize.height,
+                        srcData.mPicSize.width,
+                        srcData.mPicSize.height,
                         aDestSize.width,
                         aDestSize.height,
-                        aData.mYStride,
-                        aData.mCbCrStride,
+                        srcData.mYStride,
+                        srcData.mCbCrStride,
                         aStride,
                         yuvtype,
-                        aData.mYUVColorSpace,
+                        srcData.mYUVColorSpace,
                         FILTER_BILINEAR);
   } else { // no prescale
 #if defined(HAVE_YCBCR_TO_RGB565)
     if (aDestFormat == SurfaceFormat::R5G6B5_UINT16) {
-      ConvertYCbCrToRGB565(aData.mYChannel,
-                           aData.mCbChannel,
-                           aData.mCrChannel,
+      ConvertYCbCrToRGB565(srcData.mYChannel,
+                           srcData.mCbChannel,
+                           srcData.mCrChannel,
                            aDestBuffer,
-                           aData.mPicX,
-                           aData.mPicY,
-                           aData.mPicSize.width,
-                           aData.mPicSize.height,
-                           aData.mYStride,
-                           aData.mCbCrStride,
+                           srcData.mPicX,
+                           srcData.mPicY,
+                           srcData.mPicSize.width,
+                           srcData.mPicSize.height,
+                           srcData.mYStride,
+                           srcData.mCbCrStride,
                            aStride,
                            yuvtype);
     } else // aDestFormat != SurfaceFormat::R5G6B5_UINT16
 #endif
-      ConvertYCbCrToRGB32(aData.mYChannel, //
-                          aData.mCbChannel,
-                          aData.mCrChannel,
+      ConvertYCbCrToRGB32(srcData.mYChannel, //
+                          srcData.mCbChannel,
+                          srcData.mCrChannel,
                           aDestBuffer,
-                          aData.mPicX,
-                          aData.mPicY,
-                          aData.mPicSize.width,
-                          aData.mPicSize.height,
-                          aData.mYStride,
-                          aData.mCbCrStride,
+                          srcData.mPicX,
+                          srcData.mPicY,
+                          srcData.mPicSize.width,
+                          srcData.mPicSize.height,
+                          srcData.mYStride,
+                          srcData.mCbCrStride,
                           aStride,
                           yuvtype,
-                          aData.mYUVColorSpace);
+                          srcData.mYUVColorSpace);
   }
 }
 
 void
 ConvertYCbCrAToARGB(const uint8_t* aSrcY,
                     const uint8_t* aSrcU,
                     const uint8_t* aSrcV,
                     const uint8_t* aSrcA,