Bug 1262278 - Rename PlanarYCbCrImage::SetData and PlanarYCbCrImage::SetDataNoCopy r=nical
author: Sotaro Ikeda <sotaro.ikeda.g@gmail.com>
Mon, 18 Apr 2016 20:12:41 -0700
changeset 331604 49d1b18f19702797612126f92e037d70f59d860e
parent 331603 e995cbd2835c255c6fbae99a17d2cdfdace8fd8a
child 331605 04219460c57d3aa5533e4841b3603b0f3aa3bbca
push id: 6048
push user: kmoir@mozilla.com
push date: Mon, 06 Jun 2016 19:02:08 +0000
treeherder: mozilla-beta@46d72a56c57d [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: nical
bugs: 1262278
milestone48.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1262278 - Rename PlanarYCbCrImage::SetData and PlanarYCbCrImage::SetDataNoCopy r=nical
dom/camera/GonkCameraControl.cpp
dom/media/MediaData.cpp
dom/media/MediaStreamGraph.cpp
dom/media/VideoSegment.cpp
dom/media/android/AndroidMediaReader.cpp
dom/media/gtest/TestVideoTrackEncoder.cpp
dom/media/omx/OMXCodecWrapper.cpp
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineGonkVideoSource.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
gfx/layers/GrallocImages.cpp
gfx/layers/GrallocImages.h
gfx/layers/ImageContainer.cpp
gfx/layers/ImageContainer.h
gfx/layers/basic/BasicImages.cpp
gfx/layers/ipc/SharedPlanarYCbCrImage.cpp
gfx/layers/ipc/SharedPlanarYCbCrImage.h
media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
--- a/dom/camera/GonkCameraControl.cpp
+++ b/dom/camera/GonkCameraControl.cpp
@@ -2377,17 +2377,17 @@ nsGonkCameraControl::OnPoster(void* aDat
 void
 nsGonkCameraControl::OnNewPreviewFrame(layers::TextureClient* aBuffer)
 {
 #ifdef MOZ_WIDGET_GONK
   RefPtr<GrallocImage> frame = new GrallocImage();
 
   IntSize picSize(mCurrentConfiguration.mPreviewSize.width,
                   mCurrentConfiguration.mPreviewSize.height);
-  frame->SetData(aBuffer, picSize);
+  frame->AdoptData(aBuffer, picSize);
 
   if (mCapturePoster.exchange(false)) {
     CreatePoster(frame,
                  mCurrentConfiguration.mPreviewSize.width,
                  mCurrentConfiguration.mPreviewSize.height,
                  mVideoRotation);
     return;
   }
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -242,19 +242,19 @@ bool VideoData::SetVideoDataToImage(Plan
   data.mCrSkip = Cr.mSkip;
   data.mPicX = aPicture.x;
   data.mPicY = aPicture.y;
   data.mPicSize = aPicture.Size();
   data.mStereoMode = aInfo.mStereoMode;
 
   aVideoImage->SetDelayedConversion(true);
   if (aCopyData) {
-    return aVideoImage->SetData(data);
+    return aVideoImage->CopyData(data);
   } else {
-    return aVideoImage->SetDataNoCopy(data);
+    return aVideoImage->AdoptData(data);
   }
 }
 
 /* static */
 already_AddRefed<VideoData>
 VideoData::Create(const VideoInfo& aInfo,
                   ImageContainer* aContainer,
                   Image* aImage,
@@ -475,17 +475,17 @@ VideoData::Create(const VideoInfo& aInfo
                                       aTime,
                                       aDuration,
                                       aKeyframe,
                                       aTimecode,
                                       aInfo.mDisplay,
                                       0));
 
   RefPtr<layers::GrallocImage> image = new layers::GrallocImage();
-  image->SetData(aBuffer, aPicture.Size());
+  image->AdoptData(aBuffer, aPicture.Size());
   v->mImage = image;
 
   return v.forget();
 }
 #endif  // MOZ_OMX_DECODER
 
 MediaRawData::MediaRawData()
   : MediaData(RAW_DATA, 0)
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -885,17 +885,17 @@ SetImageToBlackPixel(PlanarYCbCrImage* a
   uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };
 
   PlanarYCbCrData data;
   data.mYChannel = blackPixel;
   data.mCbChannel = blackPixel + 1;
   data.mCrChannel = blackPixel + 2;
   data.mYStride = data.mCbCrStride = 1;
   data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1);
-  aImage->SetData(data);
+  aImage->CopyData(data);
 }
 
 class VideoFrameContainerInvalidateRunnable : public nsRunnable {
 public:
   explicit VideoFrameContainerInvalidateRunnable(VideoFrameContainer* aVideoFrameContainer)
     : mVideoFrameContainer(aVideoFrameContainer)
   {}
   NS_IMETHOD Run()
--- a/dom/media/VideoSegment.cpp
+++ b/dom/media/VideoSegment.cpp
@@ -74,18 +74,18 @@ VideoFrame::CreateBlackImage(const gfx::
   data.mCbChannel = frame.get() + aSize.height * data.mYStride;
   data.mCrChannel = data.mCbChannel + aSize.height * data.mCbCrStride / 2;
   data.mCbCrSize = gfx::IntSize(aSize.width / 2, aSize.height / 2);
   data.mPicX = 0;
   data.mPicY = 0;
   data.mPicSize = gfx::IntSize(aSize.width, aSize.height);
   data.mStereoMode = StereoMode::MONO;
 
-  // SetData copies data, so we can free data.
-  if (!image->SetData(data)) {
+  // Copies data, so we can free data.
+  if (!image->CopyData(data)) {
     MOZ_ASSERT(false);
     return nullptr;
   }
 
   return image.forget();
 }
 
 VideoChunk::VideoChunk()
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -416,17 +416,17 @@ AndroidMediaReader::ImageBufferCallback:
   frameDesc.mYSkip = 0;
   frameDesc.mCbSkip = 0;
   frameDesc.mCrSkip = 0;
 
   frameDesc.mPicX = 0;
   frameDesc.mPicY = 0;
   frameDesc.mPicSize = IntSize(aWidth, aHeight);
 
-  yuvImage->SetDataNoCopy(frameDesc);
+  yuvImage->AdoptData(frameDesc);
 
   return buffer;
 }
 
 already_AddRefed<Image>
 AndroidMediaReader::ImageBufferCallback::GetImage()
 {
   return mImage.forget();
--- a/dom/media/gtest/TestVideoTrackEncoder.cpp
+++ b/dom/media/gtest/TestVideoTrackEncoder.cpp
@@ -82,17 +82,17 @@ private:
     data.mCbChannel = cb;
     data.mCbSkip = 0;
 
     // CrCb plane vectors.
     data.mCbCrStride = halfWidth;
     data.mCbCrSize.width = halfWidth;
     data.mCbCrSize.height = halfHeight;
 
-    image->SetData(data);
+    image->CopyData(data);
     return image;
   }
 
   Image *CreateNV12Image()
   {
     PlanarYCbCrImage *image = new RecyclingPlanarYCbCrImage(new BufferRecycleBin());
     PlanarYCbCrData data;
     data.mPicSize = mImageSize;
@@ -119,17 +119,17 @@ private:
     data.mCbChannel = cb;
     data.mCbSkip = 1;
 
     // 4:2:0.
     data.mCbCrStride = mImageSize.width;
     data.mCbCrSize.width = halfWidth;
     data.mCbCrSize.height = halfHeight;
 
-    image->SetData(data);
+    image->CopyData(data);
     return image;
   }
 
   Image *CreateNV21Image()
   {
     PlanarYCbCrImage *image = new RecyclingPlanarYCbCrImage(new BufferRecycleBin());
     PlanarYCbCrData data;
     data.mPicSize = mImageSize;
@@ -156,17 +156,17 @@ private:
     data.mCbChannel = cb;
     data.mCbSkip = 1;
 
     // 4:2:0.
     data.mCbCrStride = mImageSize.width;
     data.mCbCrSize.width = halfWidth;
     data.mCbCrSize.height = halfHeight;
 
-    image->SetData(data);
+    image->CopyData(data);
     return image;
   }
 
 private:
   mozilla::gfx::IntSize mImageSize;
   nsTArray<uint8_t> mSourceBuffer;
 };
 
--- a/dom/media/omx/OMXCodecWrapper.cpp
+++ b/dom/media/omx/OMXCodecWrapper.cpp
@@ -484,17 +484,17 @@ OMXVideoEncoder::Encode(const Image* aIm
   ImageFormat format = ImageFormat::PLANAR_YCBCR;
   if (img) {
     format = img->GetFormat();
     gfx::IntSize size = img->GetSize();
     // Validate input image.
     NS_ENSURE_TRUE(aWidth == size.width, NS_ERROR_INVALID_ARG);
     NS_ENSURE_TRUE(aHeight == size.height, NS_ERROR_INVALID_ARG);
     if (format == ImageFormat::PLANAR_YCBCR) {
-      // Test for data, allowing SetDataNoCopy() on an image without an mBuffer
+      // Test for data, allowing AdoptData() on an image without an mBuffer
       // (as used from WebrtcOMXH264VideoCodec, and a few other places) - bug 1067442
       const PlanarYCbCrData* yuv = static_cast<PlanarYCbCrImage*>(img)->GetData();
       NS_ENSURE_TRUE(yuv->mYChannel, NS_ERROR_INVALID_ARG);
     } else if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
       // Reject unsupported gralloc-ed buffers.
       int halFormat = static_cast<GrallocImage*>(img)->GetGraphicBuffer()->getPixelFormat();
       NS_ENSURE_TRUE(halFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP ||
                      halFormat == HAL_PIXEL_FORMAT_YV12 ||
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -246,17 +246,17 @@ MediaEngineDefaultVideoSource::Notify(ns
 #ifdef MOZ_WEBRTC
   uint64_t timestamp = PR_Now();
   YuvStamper::Encode(mOpts.mWidth, mOpts.mHeight, mOpts.mWidth,
 		     data.mYChannel,
 		     reinterpret_cast<unsigned char*>(&timestamp), sizeof(timestamp),
 		     0, 0);
 #endif
 
-  bool setData = ycbcr_image->SetData(data);
+  bool setData = ycbcr_image->CopyData(data);
   MOZ_ASSERT(setData);
 
   // SetData copies data, so we can free the frame
   ReleaseFrame(data);
 
   if (!setData) {
     return NS_ERROR_FAILURE;
   }
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
@@ -784,17 +784,17 @@ MediaEngineGonkVideoSource::RotateImage(
                           dstPtr + (yStride * dstHeight), uvStride,
                           0, 0,
                           graphicBuffer->getStride(), aHeight,
                           aWidth, aHeight,
                           static_cast<libyuv::RotationMode>(mRotation),
                           libyuv::FOURCC_NV21);
     destBuffer->unlock();
 
-    image->AsGrallocImage()->SetData(textureClient, gfx::IntSize(dstWidth, dstHeight));
+    image->AsGrallocImage()->AdoptData(textureClient, gfx::IntSize(dstWidth, dstHeight));
   } else {
     // Handle out of gralloc case.
     image = mImageContainer->CreatePlanarYCbCrImage();
     uint8_t* dstPtr = image->AsPlanarYCbCrImage()->AllocateAndGetNewBuffer(size);
 
     libyuv::ConvertToI420(srcPtr, size,
                           dstPtr, dstWidth,
                           dstPtr + (dstWidth * dstHeight), half_width,
@@ -816,17 +816,17 @@ MediaEngineGonkVideoSource::RotateImage(
     data.mCbChannel = dstPtr + dstHeight * data.mYStride;
     data.mCrChannel = data.mCbChannel + data.mCbCrStride * (dstHeight / 2);
     data.mCbCrSize = IntSize(dstWidth / 2, dstHeight / 2);
     data.mPicX = 0;
     data.mPicY = 0;
     data.mPicSize = IntSize(dstWidth, dstHeight);
     data.mStereoMode = StereoMode::MONO;
 
-    image->AsPlanarYCbCrImage()->SetDataNoCopy(data);
+    image->AsPlanarYCbCrImage()->AdoptData(data);
   }
   graphicBuffer->unlock();
 
   // Implicitly releases last preview image.
   mImage = image.forget();
 }
 
 bool
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -337,17 +337,17 @@ MediaEngineRemoteVideoSource::DeliverFra
   data.mCbChannel = frame + mHeight * data.mYStride;
   data.mCrChannel = data.mCbChannel + ((mHeight+1)/2) * data.mCbCrStride;
   data.mCbCrSize = IntSize((mWidth+1)/ 2, (mHeight+1)/ 2);
   data.mPicX = 0;
   data.mPicY = 0;
   data.mPicSize = IntSize(mWidth, mHeight);
   data.mStereoMode = StereoMode::MONO;
 
-  if (!image->SetData(data)) {
+  if (!image->CopyData(data)) {
     MOZ_ASSERT(false);
     return 0;
   }
 
 #ifdef DEBUG
   static uint32_t frame_num = 0;
   LOGFRAME(("frame %d (%dx%d); timestamp %u, ntp_time %" PRIu64 ", render_time %" PRIu64,
             frame_num++, mWidth, mHeight, time_stamp, ntp_time, render_time));
--- a/gfx/layers/GrallocImages.cpp
+++ b/gfx/layers/GrallocImages.cpp
@@ -139,17 +139,17 @@ GrallocImage::SetData(const Data& aData)
   // though some gralloc hals implementation maps it when it is allocated.
   mData.mYChannel     = nullptr;
   mData.mCrChannel    = nullptr;
   mData.mCbChannel    = nullptr;
   return true;
 }
 
 void
-GrallocImage::SetData(TextureClient* aGraphicBuffer, const gfx::IntSize& aSize)
+GrallocImage::AdoptData(TextureClient* aGraphicBuffer, const gfx::IntSize& aSize)
 {
   mTextureClient = aGraphicBuffer;
   mSize = aSize;
 }
 
 /**
  * Converts YVU420 semi planar frames to RGB565, possibly taking different
  * stride values.
--- a/gfx/layers/GrallocImages.h
+++ b/gfx/layers/GrallocImages.h
@@ -58,21 +58,22 @@ public:
   virtual ~GrallocImage();
 
   /**
    * This makes a copy of the data buffers, in order to support functioning
    * in all different layer managers.
    */
   virtual bool SetData(const Data& aData);
 
+  using RecyclingPlanarYCbCrImage::AdoptData;
   /**
    *  Share the SurfaceDescriptor without making the copy, in order
    *  to support functioning in all different layer managers.
    */
-  void SetData(TextureClient* aGraphicBuffer, const gfx::IntSize& aSize);
+  void AdoptData(TextureClient* aGraphicBuffer, const gfx::IntSize& aSize);
 
   // From [android 4.0.4]/hardware/msm7k/libgralloc-qsd8k/gralloc_priv.h
   enum {
     /* OEM specific HAL formats */
     HAL_PIXEL_FORMAT_YCbCr_422_P            = 0x102,
     HAL_PIXEL_FORMAT_YCbCr_420_P            = 0x103,
     HAL_PIXEL_FORMAT_YCbCr_420_SP           = 0x109,
     HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO    = 0x10A,
--- a/gfx/layers/ImageContainer.cpp
+++ b/gfx/layers/ImageContainer.cpp
@@ -529,32 +529,26 @@ RecyclingPlanarYCbCrImage::CopyData(cons
             mData.mCbCrSize, mData.mCbCrStride, mData.mCbSkip);
   CopyPlane(mData.mCrChannel, aData.mCrChannel,
             mData.mCbCrSize, mData.mCbCrStride, mData.mCrSkip);
 
   mSize = aData.mPicSize;
   return true;
 }
 
-bool
-RecyclingPlanarYCbCrImage::SetData(const Data &aData)
-{
-  return CopyData(aData);
-}
-
 gfxImageFormat
 PlanarYCbCrImage::GetOffscreenFormat()
 {
   return mOffscreenFormat == SurfaceFormat::UNKNOWN ?
     gfxPlatform::GetPlatform()->GetOffscreenFormat() :
     mOffscreenFormat;
 }
 
 bool
-PlanarYCbCrImage::SetDataNoCopy(const Data &aData)
+PlanarYCbCrImage::AdoptData(const Data &aData)
 {
   mData = aData;
   mSize = aData.mPicSize;
   return true;
 }
 
 uint8_t*
 RecyclingPlanarYCbCrImage::AllocateAndGetNewBuffer(uint32_t aSize)
--- a/gfx/layers/ImageContainer.h
+++ b/gfx/layers/ImageContainer.h
@@ -722,26 +722,26 @@ public:
   };
 
   virtual ~PlanarYCbCrImage() {}
 
   /**
    * This makes a copy of the data buffers, in order to support functioning
    * in all different layer managers.
    */
-  virtual bool SetData(const Data& aData) = 0;
+  virtual bool CopyData(const Data& aData) = 0;
 
   /**
    * This doesn't make a copy of the data buffers. Can be used when mBuffer is
-   * pre allocated with AllocateAndGetNewBuffer(size) and then SetDataNoCopy is
+   * pre allocated with AllocateAndGetNewBuffer(size) and then AdoptData is
    * called to only update the picture size, planes etc. fields in mData.
    * The GStreamer media backend uses this to decode into PlanarYCbCrImage(s)
    * directly.
    */
-  virtual bool SetDataNoCopy(const Data &aData);
+  virtual bool AdoptData(const Data &aData);
 
   /**
    * This allocates and returns a new buffer
    */
   virtual uint8_t* AllocateAndGetNewBuffer(uint32_t aSize) = 0;
 
   /**
    * Ask this Image to not convert YUV to RGB during SetData, and make
@@ -788,26 +788,20 @@ protected:
   nsCountedRef<nsMainThreadSourceSurfaceRef> mSourceSurface;
   uint32_t mBufferSize;
 };
 
 class RecyclingPlanarYCbCrImage: public PlanarYCbCrImage {
 public:
   explicit RecyclingPlanarYCbCrImage(BufferRecycleBin *aRecycleBin) : mRecycleBin(aRecycleBin) {}
   virtual ~RecyclingPlanarYCbCrImage() override;
-  virtual bool SetData(const Data& aData) override;
+  virtual bool CopyData(const Data& aData) override;
   virtual uint8_t* AllocateAndGetNewBuffer(uint32_t aSize) override;
   virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override;
 protected:
-  /**
-   * Make a copy of the YCbCr data into local storage.
-   *
-   * @param aData           Input image data.
-   */
-  bool CopyData(const Data& aData);
 
   /**
    * Return a buffer to store image data in.
    */
   mozilla::UniquePtr<uint8_t[]> AllocateBuffer(uint32_t aSize);
 
   RefPtr<BufferRecycleBin> mRecycleBin;
   mozilla::UniquePtr<uint8_t[]> mBuffer;
--- a/gfx/layers/basic/BasicImages.cpp
+++ b/gfx/layers/basic/BasicImages.cpp
@@ -44,17 +44,17 @@ public:
   {
     if (mDecodedBuffer) {
       // Right now this only happens if the Image was never drawn, otherwise
       // this will have been tossed away at surface destruction.
       mRecycleBin->RecycleBuffer(Move(mDecodedBuffer), mSize.height * mStride);
     }
   }
 
-  virtual bool SetData(const Data& aData) override;
+  virtual bool CopyData(const Data& aData) override;
   virtual void SetDelayedConversion(bool aDelayed) override { mDelayedConversion = aDelayed; }
 
   already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override;
 
   virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
   {
     return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
   }
@@ -81,19 +81,19 @@ public:
   virtual RefPtr<PlanarYCbCrImage>
   CreatePlanarYCbCrImage(const gfx::IntSize& aScaleHint, BufferRecycleBin* aRecycleBin)
   {
     return new BasicPlanarYCbCrImage(aScaleHint, gfxPlatform::GetPlatform()->GetOffscreenFormat(), aRecycleBin);
   }
 };
 
 bool
-BasicPlanarYCbCrImage::SetData(const Data& aData)
+BasicPlanarYCbCrImage::CopyData(const Data& aData)
 {
-  RecyclingPlanarYCbCrImage::SetData(aData);
+  RecyclingPlanarYCbCrImage::CopyData(aData);
 
   if (mDelayedConversion) {
     return false;
   }
 
   // Do some sanity checks to prevent integer overflow
   if (aData.mYSize.width > PlanarYCbCrImage::MAX_DIMENSION ||
       aData.mYSize.height > PlanarYCbCrImage::MAX_DIMENSION) {
--- a/gfx/layers/ipc/SharedPlanarYCbCrImage.cpp
+++ b/gfx/layers/ipc/SharedPlanarYCbCrImage.cpp
@@ -77,17 +77,17 @@ SharedPlanarYCbCrImage::GetAsSourceSurfa
   if (!mTextureClient) {
     NS_WARNING("Can't get as surface");
     return nullptr;
   }
   return PlanarYCbCrImage::GetAsSourceSurface();
 }
 
 bool
-SharedPlanarYCbCrImage::SetData(const PlanarYCbCrData& aData)
+SharedPlanarYCbCrImage::CopyData(const PlanarYCbCrData& aData)
 {
   // If mTextureClient has not already been allocated (through Allocate(aData))
   // allocate it. This code path is slower than the one used when Allocate has
   // been called since it will trigger a full copy.
   PlanarYCbCrData data = aData;
   if (!mTextureClient && !Allocate(data)) {
     return false;
   }
@@ -135,19 +135,19 @@ SharedPlanarYCbCrImage::AllocateAndGetNe
     // buffer which is where the y channel starts by default.
     return mapped.y.data;
   } else {
     MOZ_CRASH();
   }
 }
 
 bool
-SharedPlanarYCbCrImage::SetDataNoCopy(const Data &aData)
+SharedPlanarYCbCrImage::AdoptData(const Data &aData)
 {
-  // SetDataNoCopy is used to update YUV plane offsets without (re)allocating
+  // AdoptData is used to update YUV plane offsets without (re)allocating
   // memory previously allocated with AllocateAndGetNewBuffer().
 
   MOZ_ASSERT(mTextureClient, "This Image should have already allocated data");
   if (!mTextureClient) {
     return false;
   }
   mData = aData;
   mSize = aData.mPicSize;
--- a/gfx/layers/ipc/SharedPlanarYCbCrImage.h
+++ b/gfx/layers/ipc/SharedPlanarYCbCrImage.h
@@ -29,18 +29,18 @@ public:
 protected:
   ~SharedPlanarYCbCrImage();
 
 public:
   virtual TextureClient* GetTextureClient(CompositableClient* aClient) override;
   virtual uint8_t* GetBuffer() override;
 
   virtual already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override;
-  virtual bool SetData(const PlanarYCbCrData& aData) override;
-  virtual bool SetDataNoCopy(const Data &aData) override;
+  virtual bool CopyData(const PlanarYCbCrData& aData) override;
+  virtual bool AdoptData(const Data &aData) override;
 
   virtual bool Allocate(PlanarYCbCrData& aData);
   virtual uint8_t* AllocateAndGetNewBuffer(uint32_t aSize) override;
 
   virtual bool IsValid() override;
 
   virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
   {
--- a/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
@@ -524,17 +524,17 @@ public:
     RefPtr<layers::TextureClient> buffer = mNativeWindow->getCurrentBuffer();
     if (!buffer) {
       CODEC_LOGE("Decoder NewFrame: Get null buffer");
       return;
     }
 
     gfx::IntSize picSize(buffer->GetSize());
     nsAutoPtr<layers::GrallocImage> grallocImage(new layers::GrallocImage());
-    grallocImage->SetData(buffer, picSize);
+    grallocImage->AdoptData(buffer, picSize);
 
     // Get timestamp of the frame about to render.
     int64_t timestamp = -1;
     int64_t renderTimeMs = -1;
     {
       MutexAutoLock lock(mDecodedFrameLock);
       if (mDecodedFrames.empty()) {
         return;
@@ -999,18 +999,18 @@ WebrtcOMXH264VideoEncoder::Encode(const 
   yuvData.mCbCrStride = aInputImage.stride(webrtc::kUPlane);
   yuvData.mCbChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kUPlane));
   yuvData.mCrChannel = const_cast<uint8_t*>(aInputImage.buffer(webrtc::kVPlane));
   yuvData.mCbCrSize = gfx::IntSize((yuvData.mYSize.width + 1) / 2,
                                    (yuvData.mYSize.height + 1) / 2);
   yuvData.mPicSize = yuvData.mYSize;
   yuvData.mStereoMode = StereoMode::MONO;
   layers::RecyclingPlanarYCbCrImage img(nullptr);
-  // SetDataNoCopy() doesn't need AllocateAndGetNewBuffer(); OMXVideoEncoder is ok with this
-  img.SetDataNoCopy(yuvData);
+  // AdoptData() doesn't need AllocateAndGetNewBuffer(); OMXVideoEncoder is ok with this
+  img.AdoptData(yuvData);
 
   CODEC_LOGD("Encode frame: %dx%d, timestamp %u (%lld), renderTimeMs %" PRIu64,
              aInputImage.width(), aInputImage.height(),
              aInputImage.timestamp(), aInputImage.timestamp() * 1000ll / 90,
              aInputImage.render_time_ms());
 
   nsresult rv = mOMX->Encode(&img,
                              yuvData.mYSize.width,
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -2229,17 +2229,17 @@ public:
       yuvData.mCbChannel = frame + height_ * yuvData.mYStride;
       yuvData.mCrChannel = yuvData.mCbChannel + ((height_ + 1) >> 1) * yuvData.mCbCrStride;
       yuvData.mCbCrSize = IntSize(yuvData.mCbCrStride, (height_ + 1) >> 1);
       yuvData.mPicX = 0;
       yuvData.mPicY = 0;
       yuvData.mPicSize = IntSize(width_, height_);
       yuvData.mStereoMode = StereoMode::MONO;
 
-      if (!yuvImage->SetData(yuvData)) {
+      if (!yuvImage->CopyData(yuvData)) {
         MOZ_ASSERT(false);
         return;
       }
 
       image_ = yuvImage;
     }
 #ifdef WEBRTC_GONK
     else {