Bug 1219330 - Handle PlanarYCbCrImage::SetData failure. r=jya, r=jesup, a=lizzard
author     Nicolas Silva <nsilva@mozilla.com>
date       Tue, 03 Nov 2015 12:24:26 +0100
changeset  305499  3389530de574177461df40b20e460b66107416a6
parent     305498  4038f198f46895875fa7ca6bbb1636c7a6c9b70e
child      305500  d05eb25b0baaaebf758544e5748a558379ae64da
push id    1001
push user  raliiev@mozilla.com
push date  Mon, 18 Jan 2016 19:06:03 +0000
reviewers  jya, jesup, lizzard
bugs       1219330
milestone  44.0a2
Bug 1219330 - Handle PlanarYCbCrImage::SetData failure. r=jya, r=jesup, a=lizzard
dom/media/MediaData.cpp
dom/media/MediaData.h
dom/media/VideoSegment.cpp
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
gfx/layers/GrallocImages.cpp
gfx/layers/GrallocImages.h
gfx/layers/ImageContainer.cpp
gfx/layers/ImageContainer.h
gfx/layers/basic/BasicImages.cpp
gfx/layers/ipc/SharedPlanarYCbCrImage.cpp
gfx/layers/ipc/SharedPlanarYCbCrImage.h
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
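
For reference, a minimal caller-side sketch of the pattern this patch introduces: SetData() and SetDataNoCopy() now return a bool, and callers are expected to check it. The names aContainer and data below are hypothetical; the logic mirrors the VideoData::Create() changes in dom/media/MediaData.cpp.

  // Hypothetical caller (names are illustrative), following the updated
  // VideoData::Create() path: create the image, then propagate failure.
  RefPtr<layers::Image> img = aContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
  if (!img) {
    return nullptr;
  }
  layers::PlanarYCbCrImage* ycbcr =
    static_cast<layers::PlanarYCbCrImage*>(img.get());
  if (!ycbcr->SetData(data)) {
    // Buffer allocation or the copy failed; bail out instead of
    // handing out an image with no pixels.
    return nullptr;
  }
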
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -195,24 +195,24 @@ VideoData::ShallowCopyUpdateTimestampAnd
                                         aOther->mDisplay,
                                         aOther->mFrameID);
   v->mDiscontinuity = aOther->mDiscontinuity;
   v->mImage = aOther->mImage;
   return v.forget();
 }
 
 /* static */
-void VideoData::SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
+bool VideoData::SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                     const VideoInfo& aInfo,
                                     const YCbCrBuffer &aBuffer,
                                     const IntRect& aPicture,
                                     bool aCopyData)
 {
   if (!aVideoImage) {
-    return;
+    return false;
   }
   const YCbCrBuffer::Plane &Y = aBuffer.mPlanes[0];
   const YCbCrBuffer::Plane &Cb = aBuffer.mPlanes[1];
   const YCbCrBuffer::Plane &Cr = aBuffer.mPlanes[2];
 
   PlanarYCbCrData data;
   data.mYChannel = Y.mData + Y.mOffset;
   data.mYSize = IntSize(Y.mWidth, Y.mHeight);
@@ -226,19 +226,19 @@ void VideoData::SetVideoDataToImage(Plan
   data.mCrSkip = Cr.mSkip;
   data.mPicX = aPicture.x;
   data.mPicY = aPicture.y;
   data.mPicSize = aPicture.Size();
   data.mStereoMode = aInfo.mStereoMode;
 
   aVideoImage->SetDelayedConversion(true);
   if (aCopyData) {
-    aVideoImage->SetData(data);
+    return aVideoImage->SetData(data);
   } else {
-    aVideoImage->SetDataNoCopy(data);
+    return aVideoImage->SetDataNoCopy(data);
   }
 }
 
 /* static */
 already_AddRefed<VideoData>
 VideoData::Create(const VideoInfo& aInfo,
                   ImageContainer* aContainer,
                   Image* aImage,
@@ -327,34 +327,34 @@ VideoData::Create(const VideoInfo& aInfo
   if (!v->mImage) {
     return nullptr;
   }
   NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR ||
                v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
                "Wrong format?");
   PlanarYCbCrImage* videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());
 
-  if (!aImage) {
-    VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
-                                   true /* aCopyData */);
-  } else {
-    VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
-                                   false /* aCopyData */);
+  bool shouldCopyData = (aImage == nullptr);
+  if (!VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
+                                      shouldCopyData)) {
+    return nullptr;
   }
 
 #ifdef MOZ_WIDGET_GONK
   if (!videoImage->IsValid() && !aImage && IsYV12Format(Y, Cb, Cr)) {
     // Failed to allocate gralloc. Try fallback.
     v->mImage = aContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
     if (!v->mImage) {
       return nullptr;
     }
     videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());
-    VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
-                                   true /* aCopyData */);
+    if (!VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
+                                        true /* aCopyData */)) {
+      return nullptr;
+    }
   }
 #endif
   return v.forget();
 }
 
 /* static */
 already_AddRefed<VideoData>
 VideoData::Create(const VideoInfo& aInfo,
@@ -470,17 +470,19 @@ VideoData::Create(const VideoInfo& aInfo
                "Wrong format?");
   typedef mozilla::layers::GrallocImage GrallocImage;
   GrallocImage* videoImage = static_cast<GrallocImage*>(v->mImage.get());
   GrallocImage::GrallocData data;
 
   data.mPicSize = aPicture.Size();
   data.mGraphicBuffer = aBuffer;
 
-  videoImage->SetData(data);
+  if (!videoImage->SetData(data)) {
+    return nullptr;
+  }
 
   return v.forget();
 }
 #endif  // MOZ_OMX_DECODER
 
 // Alignment value - 1. 0 means that data isn't aligned.
 // For 32-bytes aligned, use 31U.
 #define RAW_DATA_ALIGNMENT 31U
--- a/dom/media/MediaData.h
+++ b/dom/media/MediaData.h
@@ -277,17 +277,17 @@ public:
   // specified timestamp and duration. All data from aOther is copied
   // into the new VideoData, as ShallowCopyUpdateDuration() does.
   static already_AddRefed<VideoData>
   ShallowCopyUpdateTimestampAndDuration(const VideoData* aOther, int64_t aTimestamp,
                                         int64_t aDuration);
 
   // Initialize PlanarYCbCrImage. Only When aCopyData is true,
   // video data is copied to PlanarYCbCrImage.
-  static void SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
+  static bool SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                   const VideoInfo& aInfo,
                                   const YCbCrBuffer &aBuffer,
                                   const IntRect& aPicture,
                                   bool aCopyData);
 
   size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;
 
   // Dimensions at which to display the video frame. The picture region
--- a/dom/media/VideoSegment.cpp
+++ b/dom/media/VideoSegment.cpp
@@ -75,17 +75,20 @@ VideoFrame::CreateBlackImage(const gfx::
   data.mCrChannel = data.mCbChannel + aSize.height * data.mCbCrStride / 2;
   data.mCbCrSize = gfx::IntSize(aSize.width / 2, aSize.height / 2);
   data.mPicX = 0;
   data.mPicY = 0;
   data.mPicSize = gfx::IntSize(aSize.width, aSize.height);
   data.mStereoMode = StereoMode::MONO;
 
   // SetData copies data, so we can free data.
-  planar->SetData(data);
+  if (!planar->SetData(data)) {
+    MOZ_ASSERT(false);
+    return nullptr;
+  }
 
   return image.forget();
 }
 #endif // !defined(MOZILLA_XPCOMRT_API)
 
 VideoChunk::VideoChunk()
 {}
 
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -245,20 +245,26 @@ MediaEngineDefaultVideoSource::Notify(ns
 #ifdef MOZ_WEBRTC
   uint64_t timestamp = PR_Now();
   YuvStamper::Encode(mOpts.mWidth, mOpts.mHeight, mOpts.mWidth,
 		     data.mYChannel,
 		     reinterpret_cast<unsigned char*>(&timestamp), sizeof(timestamp),
 		     0, 0);
 #endif
 
-  ycbcr_image->SetData(data);
+  bool setData = ycbcr_image->SetData(data);
+  MOZ_ASSERT(setData);
+
   // SetData copies data, so we can free the frame
   ReleaseFrame(data);
 
+  if (!setData) {
+    return NS_ERROR_FAILURE;
+  }
+
   MonitorAutoLock lock(mMonitor);
 
   // implicitly releases last image
   mImage = ycbcr_image.forget();
 
   return NS_OK;
 }
 
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -310,17 +310,20 @@ MediaEngineRemoteVideoSource::DeliverFra
   data.mCbChannel = frame + mHeight * data.mYStride;
   data.mCrChannel = data.mCbChannel + ((mHeight+1)/2) * data.mCbCrStride;
   data.mCbCrSize = IntSize((mWidth+1)/ 2, (mHeight+1)/ 2);
   data.mPicX = 0;
   data.mPicY = 0;
   data.mPicSize = IntSize(mWidth, mHeight);
   data.mStereoMode = StereoMode::MONO;
 
-  videoImage->SetData(data);
+  if (!videoImage->SetData(data)) {
+    MOZ_ASSERT(false);
+    return 0;
+  }
 
 #ifdef DEBUG
   static uint32_t frame_num = 0;
   LOGFRAME(("frame %d (%dx%d); timestamp %u, ntp_time %" PRIu64 ", render_time %" PRIu64,
             frame_num++, mWidth, mHeight, time_stamp, ntp_time, render_time));
 #endif
 
   // we don't touch anything in 'this' until here (except for snapshot,
--- a/gfx/layers/GrallocImages.cpp
+++ b/gfx/layers/GrallocImages.cpp
@@ -52,30 +52,30 @@ GrallocImage::GrallocImage()
 {
   mFormat = ImageFormat::GRALLOC_PLANAR_YCBCR;
 }
 
 GrallocImage::~GrallocImage()
 {
 }
 
-void
+bool
 GrallocImage::SetData(const Data& aData)
 {
   MOZ_ASSERT(!mTextureClient, "TextureClient is already set");
   NS_PRECONDITION(aData.mYSize.width % 2 == 0, "Image should have even width");
   NS_PRECONDITION(aData.mYSize.height % 2 == 0, "Image should have even height");
   NS_PRECONDITION(aData.mYStride % 16 == 0, "Image should have stride of multiple of 16 pixels");
 
   mData = aData;
   mSize = aData.mPicSize;
 
   if (gfxPlatform::GetPlatform()->IsInGonkEmulator()) {
     // Emulator does not support HAL_PIXEL_FORMAT_YV12.
-    return;
+    return false;
   }
 
   RefPtr<GrallocTextureClientOGL> textureClient =
        new GrallocTextureClientOGL(ImageBridgeChild::GetSingleton(),
                                    gfx::SurfaceFormat::UNKNOWN,
                                    gfx::BackendType::NONE);
   // GrallocImages are all YUV and don't support alpha.
   textureClient->SetIsOpaque(true);
@@ -83,25 +83,25 @@ GrallocImage::SetData(const Data& aData)
     textureClient->AllocateGralloc(mData.mYSize,
                                    HAL_PIXEL_FORMAT_YV12,
                                    GraphicBuffer::USAGE_SW_READ_OFTEN |
                                    GraphicBuffer::USAGE_SW_WRITE_OFTEN |
                                    GraphicBuffer::USAGE_HW_TEXTURE);
   sp<GraphicBuffer> graphicBuffer = textureClient->GetGraphicBuffer();
   if (!result || !graphicBuffer.get()) {
     mTextureClient = nullptr;
-    return;
+    return false;
   }
 
   mTextureClient = textureClient;
 
   void* vaddr;
   if (graphicBuffer->lock(GraphicBuffer::USAGE_SW_WRITE_OFTEN,
                           &vaddr) != OK) {
-    return;
+    return false;
   }
 
   uint8_t* yChannel = static_cast<uint8_t*>(vaddr);
   gfx::IntSize ySize = aData.mYSize;
   int32_t yStride = graphicBuffer->getStride();
 
   uint8_t* vChannel = yChannel + (yStride * ySize.height);
   gfx::IntSize uvSize = gfx::IntSize(ySize.width / 2,
@@ -139,22 +139,24 @@ GrallocImage::SetData(const Data& aData)
   graphicBuffer->unlock();
   // Initialize the channels' addresses.
   // Do not cache the addresses when gralloc buffer is not locked.
   // gralloc hal could map gralloc buffer only when the buffer is locked,
   // though some gralloc hals implementation maps it when it is allocated.
   mData.mYChannel     = nullptr;
   mData.mCrChannel    = nullptr;
   mData.mCbChannel    = nullptr;
+  return true;
 }
 
-void GrallocImage::SetData(const GrallocData& aData)
+bool GrallocImage::SetData(const GrallocData& aData)
 {
   mTextureClient = static_cast<GrallocTextureClientOGL*>(aData.mGraphicBuffer.get());
   mSize = aData.mPicSize;
+  return true;
 }
 
 /**
  * Converts YVU420 semi planar frames to RGB565, possibly taking different
  * stride values.
  * Needed because the Android ColorConverter class assumes that the Y and UV
  * channels have equal stride.
  */
--- a/gfx/layers/GrallocImages.h
+++ b/gfx/layers/GrallocImages.h
@@ -61,23 +61,23 @@ public:
   GrallocImage();
 
   virtual ~GrallocImage();
 
   /**
    * This makes a copy of the data buffers, in order to support functioning
    * in all different layer managers.
    */
-  virtual void SetData(const Data& aData);
+  virtual bool SetData(const Data& aData);
 
   /**
    *  Share the SurfaceDescriptor without making the copy, in order
    *  to support functioning in all different layer managers.
    */
-  virtual void SetData(const GrallocData& aData);
+  virtual bool SetData(const GrallocData& aData);
 
   // From [android 4.0.4]/hardware/msm7k/libgralloc-qsd8k/gralloc_priv.h
   enum {
     /* OEM specific HAL formats */
     HAL_PIXEL_FORMAT_YCbCr_422_P            = 0x102,
     HAL_PIXEL_FORMAT_YCbCr_420_P            = 0x103,
     HAL_PIXEL_FORMAT_YCbCr_420_SP           = 0x109,
     HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO    = 0x10A,
--- a/gfx/layers/ImageContainer.cpp
+++ b/gfx/layers/ImageContainer.cpp
@@ -488,66 +488,68 @@ CopyPlane(uint8_t *aDst, const uint8_t *
         src += aSkip;
       }
       aSrc += aStride;
       aDst += aStride;
     }
   }
 }
 
-void
+bool
 PlanarYCbCrImage::CopyData(const Data& aData)
 {
   mData = aData;
 
   // update buffer size
   size_t size = mData.mCbCrStride * mData.mCbCrSize.height * 2 +
                 mData.mYStride * mData.mYSize.height;
 
   // get new buffer
   mBuffer = AllocateBuffer(size);
   if (!mBuffer)
-    return;
+    return false;
 
   // update buffer size
   mBufferSize = size;
 
   mData.mYChannel = mBuffer;
   mData.mCbChannel = mData.mYChannel + mData.mYStride * mData.mYSize.height;
   mData.mCrChannel = mData.mCbChannel + mData.mCbCrStride * mData.mCbCrSize.height;
 
   CopyPlane(mData.mYChannel, aData.mYChannel,
             mData.mYSize, mData.mYStride, mData.mYSkip);
   CopyPlane(mData.mCbChannel, aData.mCbChannel,
             mData.mCbCrSize, mData.mCbCrStride, mData.mCbSkip);
   CopyPlane(mData.mCrChannel, aData.mCrChannel,
             mData.mCbCrSize, mData.mCbCrStride, mData.mCrSkip);
 
   mSize = aData.mPicSize;
+  return true;
 }
 
-void
+bool
 PlanarYCbCrImage::SetData(const Data &aData)
 {
-  CopyData(aData);
+  return CopyData(aData);
 }
 
 gfxImageFormat
 PlanarYCbCrImage::GetOffscreenFormat()
 {
   return mOffscreenFormat == gfxImageFormat::Unknown ?
     gfxPlatform::GetPlatform()->GetOffscreenFormat() :
     mOffscreenFormat;
 }
 
-void
+bool
 PlanarYCbCrImage::SetDataNoCopy(const Data &aData)
 {
   mData = aData;
   mSize = aData.mPicSize;
+  return true;
 }
 
 uint8_t*
 PlanarYCbCrImage::AllocateAndGetNewBuffer(uint32_t aSize)
 {
   // get new buffer
   mBuffer = AllocateBuffer(aSize);
   if (mBuffer) {
--- a/gfx/layers/ImageContainer.h
+++ b/gfx/layers/ImageContainer.h
@@ -651,26 +651,26 @@ public:
   };
 
   virtual ~PlanarYCbCrImage();
 
   /**
    * This makes a copy of the data buffers, in order to support functioning
    * in all different layer managers.
    */
-  virtual void SetData(const Data& aData);
+  virtual bool SetData(const Data& aData);
 
   /**
    * This doesn't make a copy of the data buffers. Can be used when mBuffer is
    * pre allocated with AllocateAndGetNewBuffer(size) and then SetDataNoCopy is
    * called to only update the picture size, planes etc. fields in mData.
    * The GStreamer media backend uses this to decode into PlanarYCbCrImage(s)
    * directly.
    */
-  virtual void SetDataNoCopy(const Data &aData);
+  virtual bool SetDataNoCopy(const Data &aData);
 
   /**
    * This allocates and returns a new buffer
    */
   virtual uint8_t* AllocateAndGetNewBuffer(uint32_t aSize);
 
   /**
    * Ask this Image to not convert YUV to RGB during SetData, and make
@@ -704,17 +704,17 @@ public:
   virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;
 
 protected:
   /**
    * Make a copy of the YCbCr data into local storage.
    *
    * @param aData           Input image data.
    */
-  void CopyData(const Data& aData);
+  bool CopyData(const Data& aData);
 
   /**
    * Return a buffer to store image data in.
    * The default implementation returns memory that can
    * be freed with delete[]
    */
   virtual uint8_t* AllocateBuffer(uint32_t aSize);
 
--- a/gfx/layers/basic/BasicImages.cpp
+++ b/gfx/layers/basic/BasicImages.cpp
@@ -43,17 +43,17 @@ public:
   {
     if (mDecodedBuffer) {
       // Right now this only happens if the Image was never drawn, otherwise
       // this will have been tossed away at surface destruction.
       mRecycleBin->RecycleBuffer(mDecodedBuffer.forget(), mSize.height * mStride);
     }
   }
 
-  virtual void SetData(const Data& aData) override;
+  virtual bool SetData(const Data& aData) override;
   virtual void SetDelayedConversion(bool aDelayed) override { mDelayedConversion = aDelayed; }
 
   already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override;
 
   virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
   {
     return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
   }
@@ -86,53 +86,55 @@ public:
       image = new BasicPlanarYCbCrImage(aScaleHint, gfxPlatform::GetPlatform()->GetOffscreenFormat(), aRecycleBin);
       return image.forget();
     }
 
     return ImageFactory::CreateImage(aFormat, aScaleHint, aRecycleBin);
   }
 };
 
-void
+bool
 BasicPlanarYCbCrImage::SetData(const Data& aData)
 {
   PlanarYCbCrImage::SetData(aData);
 
   if (mDelayedConversion) {
-    return;
+    return false;
   }
 
   // Do some sanity checks to prevent integer overflow
   if (aData.mYSize.width > PlanarYCbCrImage::MAX_DIMENSION ||
       aData.mYSize.height > PlanarYCbCrImage::MAX_DIMENSION) {
     NS_ERROR("Illegal image source width or height");
-    return;
+    return false;
   }
 
   gfx::SurfaceFormat format = gfx::ImageFormatToSurfaceFormat(GetOffscreenFormat());
 
   gfx::IntSize size(mScaleHint);
   gfx::GetYCbCrToRGBDestFormatAndSize(aData, format, size);
   if (size.width > PlanarYCbCrImage::MAX_DIMENSION ||
       size.height > PlanarYCbCrImage::MAX_DIMENSION) {
     NS_ERROR("Illegal image dest width or height");
-    return;
+    return false;
   }
 
   gfxImageFormat iFormat = gfx::SurfaceFormatToImageFormat(format);
   mStride = gfxASurface::FormatStrideForWidth(iFormat, size.width);
   mDecodedBuffer = AllocateBuffer(size.height * mStride);
   if (!mDecodedBuffer) {
     // out of memory
-    return;
+    return false;
   }
 
   gfx::ConvertYCbCrToRGB(aData, format, size, mDecodedBuffer, mStride);
   SetOffscreenFormat(iFormat);
   mSize = size;
+
+  return true;
 }
 
 already_AddRefed<gfx::SourceSurface>
 BasicPlanarYCbCrImage::GetAsSourceSurface()
 {
   NS_ASSERTION(NS_IsMainThread(), "Must be main thread");
 
   if (mSourceSurface) {
--- a/gfx/layers/ipc/SharedPlanarYCbCrImage.cpp
+++ b/gfx/layers/ipc/SharedPlanarYCbCrImage.cpp
@@ -75,38 +75,39 @@ SharedPlanarYCbCrImage::GetAsSourceSurfa
 {
   if (!mTextureClient) {
     NS_WARNING("Can't get as surface");
     return nullptr;
   }
   return PlanarYCbCrImage::GetAsSourceSurface();
 }
 
-void
+bool
 SharedPlanarYCbCrImage::SetData(const PlanarYCbCrData& aData)
 {
   // If mTextureClient has not already been allocated (through Allocate(aData))
   // allocate it. This code path is slower than the one used when Allocate has
   // been called since it will trigger a full copy.
   PlanarYCbCrData data = aData;
   if (!mTextureClient && !Allocate(data)) {
-    return;
+    return false;
   }
 
   MOZ_ASSERT(mTextureClient->AsTextureClientYCbCr());
   if (!mTextureClient->Lock(OpenMode::OPEN_WRITE_ONLY)) {
     MOZ_ASSERT(false, "Failed to lock the texture.");
-    return;
+    return false;
   }
   TextureClientAutoUnlock unlock(mTextureClient);
   if (!mTextureClient->AsTextureClientYCbCr()->UpdateYCbCr(aData)) {
     MOZ_ASSERT(false, "Failed to copy YCbCr data into the TextureClient");
-    return;
+    return false;
   }
   mTextureClient->MarkImmutable();
+  return true;
 }
 
// needs to be overridden because the parent class sets mBuffer which we
 // do not want to happen.
 uint8_t*
 SharedPlanarYCbCrImage::AllocateAndGetNewBuffer(uint32_t aSize)
 {
   MOZ_ASSERT(!mTextureClient, "This image already has allocated data");
@@ -126,22 +127,22 @@ SharedPlanarYCbCrImage::AllocateAndGetNe
 
   // update buffer size
   mBufferSize = size;
 
   YCbCrImageDataSerializer serializer(mTextureClient->GetBuffer(), mTextureClient->GetBufferSize());
   return serializer.GetData();
 }
 
-void
+bool
 SharedPlanarYCbCrImage::SetDataNoCopy(const Data &aData)
 {
   MOZ_ASSERT(mTextureClient, "This Image should have already allocated data");
   if (!mTextureClient) {
-    return;
+    return false;
   }
   mData = aData;
   mSize = aData.mPicSize;
   /* SetDataNoCopy is used to update YUV plane offsets without (re)allocating
    * memory previously allocated with AllocateAndGetNewBuffer().
    * serializer.GetData() returns the address of the memory previously allocated
    * with AllocateAndGetNewBuffer(), that we subtract from the Y, Cb, Cr
    * channels to compute 0-based offsets to pass to InitializeBufferInfo.
@@ -154,16 +155,17 @@ SharedPlanarYCbCrImage::SetDataNoCopy(co
   serializer.InitializeBufferInfo(yOffset,
                                   cbOffset,
                                   crOffset,
                                   aData.mYStride,
                                   aData.mCbCrStride,
                                   aData.mYSize,
                                   aData.mCbCrSize,
                                   aData.mStereoMode);
+  return true;
 }
 
 uint8_t*
 SharedPlanarYCbCrImage::AllocateBuffer(uint32_t aSize)
 {
   MOZ_ASSERT(!mTextureClient,
              "This image already has allocated data");
   mTextureClient = TextureClient::CreateWithBufferSize(mCompositable->GetForwarder(),
--- a/gfx/layers/ipc/SharedPlanarYCbCrImage.h
+++ b/gfx/layers/ipc/SharedPlanarYCbCrImage.h
@@ -30,18 +30,18 @@ public:
 protected:
   ~SharedPlanarYCbCrImage();
 
 public:
   virtual TextureClient* GetTextureClient(CompositableClient* aClient) override;
   virtual uint8_t* GetBuffer() override;
 
   virtual already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override;
-  virtual void SetData(const PlanarYCbCrData& aData) override;
-  virtual void SetDataNoCopy(const Data &aData) override;
+  virtual bool SetData(const PlanarYCbCrData& aData) override;
+  virtual bool SetDataNoCopy(const Data &aData) override;
 
   virtual bool Allocate(PlanarYCbCrData& aData);
   virtual uint8_t* AllocateBuffer(uint32_t aSize) override;
   // needs to be overridden because the parent class sets mBuffer which we
   // do not want to happen.
   virtual uint8_t* AllocateAndGetNewBuffer(uint32_t aSize) override;
 
   virtual bool IsValid() override;
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1496,17 +1496,20 @@ void MediaPipelineReceiveVideo::Pipeline
     yuvData.mCbChannel = frame + height_ * yuvData.mYStride;
     yuvData.mCrChannel = yuvData.mCbChannel + ((height_ + 1) >> 1) * yuvData.mCbCrStride;
     yuvData.mCbCrSize = IntSize((width_ + 1) >> 1, (height_ + 1) >> 1);
     yuvData.mPicX = 0;
     yuvData.mPicY = 0;
     yuvData.mPicSize = IntSize(width_, height_);
     yuvData.mStereoMode = StereoMode::MONO;
 
-    yuvImage->SetData(yuvData);
+    if (!yuvImage->SetData(yuvData)) {
+      MOZ_ASSERT(false);
+      return;
+    }
 
     image_ = image.forget();
   }
 #ifdef WEBRTC_GONK
   else {
     // Decoder produced video frame that can be appended to the track directly.
     MOZ_ASSERT(video_image);
     image_ = video_image;