Bug 952033 - Part c: Use IntSize and IntRect in VideoData; r=roc
authorMs2ger <ms2ger@gmail.com>
Sun, 09 Feb 2014 09:04:38 +0100
changeset 184914 2ec27f839431e624ae015938ebeb0e73442d0dfb
parent 184913 768b9e5779e3d5207ea6986b3f2e5d7c4ee91546
child 184915 131fb5ea3dc55445176343d316f2f306e796be51
push id: 3503
push user: raliiev@mozilla.com
push date: Mon, 28 Apr 2014 18:51:11 +0000
treeherder: mozilla-beta@c95ac01e332e [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: roc
bugs: 952033
milestone: 30.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 952033 - Part c: Use IntSize and IntRect in VideoData; r=roc
content/media/MediaData.cpp
content/media/MediaData.h
content/media/MediaDecoderStateMachine.cpp
content/media/fmp4/BlankDecoderModule.cpp
content/media/fmp4/wmf/WMFVideoOutputSource.cpp
content/media/gstreamer/GStreamerReader.cpp
content/media/gstreamer/GStreamerReader.h
content/media/ogg/OggReader.cpp
content/media/omx/MediaOmxReader.cpp
content/media/plugins/MediaPluginReader.cpp
content/media/raw/RawReader.cpp
content/media/webm/WebMReader.cpp
content/media/wmf/WMFReader.cpp
--- a/content/media/MediaData.cpp
+++ b/content/media/MediaData.cpp
@@ -68,17 +68,17 @@ VideoData::VideoData(int64_t aOffset, in
   NS_ASSERTION(mDuration >= 0, "Frame must have non-negative duration.");
 }
 
 VideoData::VideoData(int64_t aOffset,
                      int64_t aTime,
                      int64_t aDuration,
                      bool aKeyframe,
                      int64_t aTimecode,
-                     nsIntSize aDisplay)
+                     IntSize aDisplay)
   : MediaData(VIDEO_FRAME, aOffset, aTime, aDuration),
     mDisplay(aDisplay),
     mTimecode(aTimecode),
     mDuplicate(false),
     mKeyframe(aKeyframe)
 {
   MOZ_COUNT_CTOR(VideoData);
   NS_ASSERTION(mDuration >= 0, "Frame must have non-negative duration.");
@@ -107,27 +107,27 @@ VideoData* VideoData::Create(VideoInfo& 
                              ImageContainer* aContainer,
                              Image* aImage,
                              int64_t aOffset,
                              int64_t aTime,
                              int64_t aDuration,
                              const YCbCrBuffer& aBuffer,
                              bool aKeyframe,
                              int64_t aTimecode,
-                             nsIntRect aPicture)
+                             const IntRect& aPicture)
 {
   if (!aImage && !aContainer) {
     // Create a dummy VideoData with no image. This gives us something to
     // send to media streams if necessary.
     nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                          aTime,
                                          aDuration,
                                          aKeyframe,
                                          aTimecode,
-                                         aInfo.mDisplay));
+                                         aInfo.mDisplay.ToIntSize()));
     return v.forget();
   }
 
   // The following situation should never happen unless there is a bug
   // in the decoder
   if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
       aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
     NS_ERROR("C planes with different sizes");
@@ -158,17 +158,17 @@ VideoData* VideoData::Create(VideoInfo& 
     return nullptr;
   }
 
   nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                        aTime,
                                        aDuration,
                                        aKeyframe,
                                        aTimecode,
-                                       aInfo.mDisplay));
+                                       aInfo.mDisplay.ToIntSize()));
   const YCbCrBuffer::Plane &Y = aBuffer.mPlanes[0];
   const YCbCrBuffer::Plane &Cb = aBuffer.mPlanes[1];
   const YCbCrBuffer::Plane &Cr = aBuffer.mPlanes[2];
 
   if (!aImage) {
     // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
     // format.
 #if 0
@@ -199,17 +199,17 @@ VideoData* VideoData::Create(VideoInfo& 
   data.mCbChannel = Cb.mData + Cb.mOffset;
   data.mCrChannel = Cr.mData + Cr.mOffset;
   data.mCbCrSize = IntSize(Cb.mWidth, Cb.mHeight);
   data.mCbCrStride = Cb.mStride;
   data.mCbSkip = Cb.mSkip;
   data.mCrSkip = Cr.mSkip;
   data.mPicX = aPicture.x;
   data.mPicY = aPicture.y;
-  data.mPicSize = aPicture.Size().ToIntSize();
+  data.mPicSize = aPicture.Size();
   data.mStereoMode = aInfo.mStereoMode;
 
   videoImage->SetDelayedConversion(true);
   if (!aImage) {
     videoImage->SetData(data);
   } else {
     videoImage->SetDataNoCopy(data);
   }
@@ -220,76 +220,76 @@ VideoData* VideoData::Create(VideoInfo& 
 VideoData* VideoData::Create(VideoInfo& aInfo,
                              ImageContainer* aContainer,
                              int64_t aOffset,
                              int64_t aTime,
                              int64_t aDuration,
                              const YCbCrBuffer& aBuffer,
                              bool aKeyframe,
                              int64_t aTimecode,
-                             nsIntRect aPicture)
+                             const IntRect& aPicture)
 {
   return Create(aInfo, aContainer, nullptr, aOffset, aTime, aDuration, aBuffer,
                 aKeyframe, aTimecode, aPicture);
 }
 
 VideoData* VideoData::Create(VideoInfo& aInfo,
                              Image* aImage,
                              int64_t aOffset,
                              int64_t aTime,
                              int64_t aDuration,
                              const YCbCrBuffer& aBuffer,
                              bool aKeyframe,
                              int64_t aTimecode,
-                             nsIntRect aPicture)
+                             const IntRect& aPicture)
 {
   return Create(aInfo, nullptr, aImage, aOffset, aTime, aDuration, aBuffer,
                 aKeyframe, aTimecode, aPicture);
 }
 
 VideoData* VideoData::CreateFromImage(VideoInfo& aInfo,
                                       ImageContainer* aContainer,
                                       int64_t aOffset,
                                       int64_t aTime,
                                       int64_t aDuration,
                                       const nsRefPtr<Image>& aImage,
                                       bool aKeyframe,
                                       int64_t aTimecode,
-                                      nsIntRect aPicture)
+                                      const IntRect& aPicture)
 {
   nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                        aTime,
                                        aDuration,
                                        aKeyframe,
                                        aTimecode,
-                                       aInfo.mDisplay));
+                                       aInfo.mDisplay.ToIntSize()));
   v->mImage = aImage;
   return v.forget();
 }
 
 #ifdef MOZ_OMX_DECODER
 VideoData* VideoData::Create(VideoInfo& aInfo,
                              ImageContainer* aContainer,
                              int64_t aOffset,
                              int64_t aTime,
                              int64_t aDuration,
                              mozilla::layers::GraphicBufferLocked* aBuffer,
                              bool aKeyframe,
                              int64_t aTimecode,
-                             nsIntRect aPicture)
+                             const IntRect& aPicture)
 {
   if (!aContainer) {
     // Create a dummy VideoData with no image. This gives us something to
     // send to media streams if necessary.
     nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                          aTime,
                                          aDuration,
                                          aKeyframe,
                                          aTimecode,
-                                         aInfo.mDisplay));
+                                         aInfo.mDisplay.ToIntSize()));
     return v.forget();
   }
 
   // The following situations could be triggered by invalid input
   if (aPicture.width <= 0 || aPicture.height <= 0) {
     NS_WARNING("Empty picture rect");
     return nullptr;
   }
@@ -306,29 +306,29 @@ VideoData* VideoData::Create(VideoInfo& 
     return nullptr;
   }
 
   nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                        aTime,
                                        aDuration,
                                        aKeyframe,
                                        aTimecode,
-                                       aInfo.mDisplay));
+                                       aInfo.mDisplay.ToIntSize()));
 
   v->mImage = aContainer->CreateImage(ImageFormat::GRALLOC_PLANAR_YCBCR);
   if (!v->mImage) {
     return nullptr;
   }
   NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
                "Wrong format?");
   typedef mozilla::layers::GrallocImage GrallocImage;
   GrallocImage* videoImage = static_cast<GrallocImage*>(v->mImage.get());
   GrallocImage::GrallocData data;
 
-  data.mPicSize = aPicture.Size().ToIntSize();
+  data.mPicSize = aPicture.Size();
   data.mGraphicBuffer = aBuffer;
 
   videoImage->SetData(data);
 
   return v.forget();
 }
 #endif  // MOZ_OMX_DECODER
 
--- a/content/media/MediaData.h
+++ b/content/media/MediaData.h
@@ -2,16 +2,17 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #if !defined(MediaData_h)
 #define MediaData_h
 
 #include "nsSize.h"
+#include "mozilla/gfx/Rect.h"
 #include "nsRect.h"
 #include "AudioSampleFormat.h"
 #include "nsIMemoryReporter.h"
 #include "SharedBuffer.h"
 
 namespace mozilla {
 
 namespace layers {
@@ -103,16 +104,18 @@ namespace layers {
 class GraphicBufferLocked;
 }
 
 class VideoInfo;
 
 // Holds a decoded video frame, in YCbCr format. These are queued in the reader.
 class VideoData : public MediaData {
 public:
+  typedef gfx::IntRect IntRect;
+  typedef gfx::IntSize IntSize;
   typedef layers::ImageContainer ImageContainer;
   typedef layers::Image Image;
 
   // YCbCr data obtained from decoding the video. The index's are:
   //   0 = Y
   //   1 = Cb
   //   2 = Cr
   struct YCbCrBuffer {
@@ -140,59 +143,59 @@ public:
                            ImageContainer* aContainer,
                            Image* aImage,
                            int64_t aOffset,
                            int64_t aTime,
                            int64_t aDuration,
                            const YCbCrBuffer &aBuffer,
                            bool aKeyframe,
                            int64_t aTimecode,
-                           nsIntRect aPicture);
+                           const IntRect& aPicture);
 
   // Variant that always makes a copy of aBuffer
   static VideoData* Create(VideoInfo& aInfo,
                            ImageContainer* aContainer,
                            int64_t aOffset,
                            int64_t aTime,
                            int64_t aDuration,
                            const YCbCrBuffer &aBuffer,
                            bool aKeyframe,
                            int64_t aTimecode,
-                           nsIntRect aPicture);
+                           const IntRect& aPicture);
 
   // Variant to create a VideoData instance given an existing aImage
   static VideoData* Create(VideoInfo& aInfo,
                            Image* aImage,
                            int64_t aOffset,
                            int64_t aTime,
                            int64_t aDuration,
                            const YCbCrBuffer &aBuffer,
                            bool aKeyframe,
                            int64_t aTimecode,
-                           nsIntRect aPicture);
+                           const IntRect& aPicture);
 
   static VideoData* Create(VideoInfo& aInfo,
                            ImageContainer* aContainer,
                            int64_t aOffset,
                            int64_t aTime,
                            int64_t aDuration,
                            layers::GraphicBufferLocked* aBuffer,
                            bool aKeyframe,
                            int64_t aTimecode,
-                           nsIntRect aPicture);
+                           const IntRect& aPicture);
 
   static VideoData* CreateFromImage(VideoInfo& aInfo,
                                     ImageContainer* aContainer,
                                     int64_t aOffset,
                                     int64_t aTime,
                                     int64_t aDuration,
                                     const nsRefPtr<Image>& aImage,
                                     bool aKeyframe,
                                     int64_t aTimecode,
-                                    nsIntRect aPicture);
+                                    const IntRect& aPicture);
 
   // Creates a new VideoData identical to aOther, but with a different
   // specified duration. All data from aOther is copied into the new
   // VideoData. The new VideoData's mImage field holds a reference to
   // aOther's mImage, i.e. the Image is not copied. This function is useful
   // in reader backends that can't determine the duration of a VideoData
   // until the next frame is decoded, i.e. it's a way to change the const
   // duration field on a VideoData.
@@ -210,17 +213,17 @@ public:
     return new VideoData(aOffset, aTime, aDuration, aTimecode);
   }
 
   ~VideoData();
 
   // Dimensions at which to display the video frame. The picture region
   // will be scaled to this size. This is should be the picture region's
   // dimensions scaled with respect to its aspect ratio.
-  const nsIntSize mDisplay;
+  const IntSize mDisplay;
 
   // Codec specific internal time code. For Ogg based codecs this is the
   // granulepos.
   const int64_t mTimecode;
 
   // This frame's image.
   nsRefPtr<Image> mImage;
 
@@ -235,15 +238,15 @@ public:
             int64_t aDuration,
             int64_t aTimecode);
 
   VideoData(int64_t aOffset,
             int64_t aTime,
             int64_t aDuration,
             bool aKeyframe,
             int64_t aTimecode,
-            nsIntSize aDisplay);
+            IntSize aDisplay);
 
 };
 
 } // namespace mozilla
 
 #endif // MediaData_h
--- a/content/media/MediaDecoderStateMachine.cpp
+++ b/content/media/MediaDecoderStateMachine.cpp
@@ -26,16 +26,18 @@
 #include "ImageContainer.h"
 #include "nsComponentManagerUtils.h"
 #include "nsITimer.h"
 #include "nsContentUtils.h"
 #include "MediaShutdownManager.h"
 
 #include "prenv.h"
 #include "mozilla/Preferences.h"
+#include "gfx2DGlue.h"
+
 #include <algorithm>
 
 namespace mozilla {
 
 using namespace mozilla::layers;
 using namespace mozilla::dom;
 
 #ifdef PR_LOGGING
@@ -703,21 +705,21 @@ void MediaDecoderStateMachine::SendStrea
               &output);
           stream->mNextVideoTime = v->mTime;
         }
         if (stream->mNextVideoTime < v->GetEndTime()) {
           DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder writing video frame %lldus to MediaStream %p for %lldus",
                                      mDecoder.get(), v->mTime, mediaStream,
                                      v->GetEndTime() - stream->mNextVideoTime));
           WriteVideoToMediaStream(v->mImage,
-              v->GetEndTime() - stream->mNextVideoTime, v->mDisplay,
+              v->GetEndTime() - stream->mNextVideoTime, ThebesIntSize(v->mDisplay),
               &output);
           stream->mNextVideoTime = v->GetEndTime();
           stream->mLastVideoImage = v->mImage;
-          stream->mLastVideoImageDisplaySize = v->mDisplay;
+          stream->mLastVideoImageDisplaySize = ThebesIntSize(v->mDisplay);
         } else {
           DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder skipping writing video frame %lldus (end %lldus) to MediaStream",
                                      mDecoder.get(), v->mTime, v->GetEndTime()));
         }
       }
       if (output.GetDuration() > 0) {
         mediaStream->AppendToTrack(TRACK_VIDEO, &output);
       }
@@ -2417,17 +2419,18 @@ void MediaDecoderStateMachine::RenderVid
 
   if (!PR_GetEnv("MOZ_QUIET")) {
     DECODER_LOG(PR_LOG_DEBUG, ("%p Decoder playing video frame %lld",
                                mDecoder.get(), aData->mTime));
   }
 
   VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
   if (container) {
-    container->SetCurrentFrame(aData->mDisplay, aData->mImage, aTarget);
+    container->SetCurrentFrame(ThebesIntSize(aData->mDisplay), aData->mImage,
+                               aTarget);
   }
 }
 
 int64_t
 MediaDecoderStateMachine::GetAudioClock()
 {
   // We must hold the decoder monitor while using the audio stream off the
   // audio thread to ensure that it doesn't get destroyed on the audio thread
--- a/content/media/fmp4/BlankDecoderModule.cpp
+++ b/content/media/fmp4/BlankDecoderModule.cpp
@@ -92,17 +92,17 @@ public:
   BlankVideoDataCreator(uint32_t aFrameWidth,
                         uint32_t aFrameHeight,
                         layers::ImageContainer* aImageContainer)
     : mFrameWidth(aFrameWidth)
     , mFrameHeight(aFrameHeight)
     , mImageContainer(aImageContainer)
   {
     mInfo.mDisplay = nsIntSize(mFrameWidth, mFrameHeight);
-    mPicture = nsIntRect(0, 0, mFrameWidth, mFrameHeight);
+    mPicture = gfx::IntRect(0, 0, mFrameWidth, mFrameHeight);
   }
 
   MediaData* Create(Microseconds aDTS,
                     Microseconds aDuration,
                     int64_t aOffsetInStream)
   {
     // Create a fake YUV buffer in a 420 format. That is, an 8bpp Y plane,
     // with a U and V plane that are half the size of the Y plane, i.e 8 bit,
@@ -144,17 +144,17 @@ public:
                              aDuration,
                              buffer,
                              true,
                              aDTS,
                              mPicture);
   }
 private:
   VideoInfo mInfo;
-  nsIntRect mPicture;
+  gfx::IntRect mPicture;
   uint32_t mFrameWidth;
   uint32_t mFrameHeight;
   RefPtr<layers::ImageContainer> mImageContainer;
 };
 
 
 class BlankAudioDataCreator {
 public:
--- a/content/media/fmp4/wmf/WMFVideoOutputSource.cpp
+++ b/content/media/fmp4/wmf/WMFVideoOutputSource.cpp
@@ -9,24 +9,26 @@
 #include "WMFUtils.h"
 #include "ImageContainer.h"
 #include "VideoUtils.h"
 #include "DXVA2Manager.h"
 #include "nsThreadUtils.h"
 #include "Layers.h"
 #include "mozilla/layers/LayersTypes.h"
 #include "prlog.h"
+#include "gfx2DGlue.h"
 
 #ifdef PR_LOGGING
 PRLogModuleInfo* GetDemuxerLog();
 #define LOG(...) PR_LOG(GetDemuxerLog(), PR_LOG_DEBUG, (__VA_ARGS__))
 #else
 #define LOG(...)
 #endif
 
+using mozilla::gfx::ToIntRect;
 using mozilla::layers::Image;
 using mozilla::layers::LayerManager;
 using mozilla::layers::LayersBackend;
 
 namespace mozilla {
 
 WMFVideoOutputSource::WMFVideoOutputSource(mozilla::layers::LayersBackend aLayersBackend,
                                  mozilla::layers::ImageContainer* aImageContainer,
@@ -272,17 +274,17 @@ WMFVideoOutputSource::CreateBasicVideoFr
   VideoData *v = VideoData::Create(mVideoInfo,
                                    mImageContainer,
                                    aStreamOffset,
                                    pts,
                                    duration,
                                    b,
                                    false,
                                    -1,
-                                   mPictureRegion);
+                                   ToIntRect(mPictureRegion));
   if (twoDBuffer) {
     twoDBuffer->Unlock2D();
   } else {
     buffer->Unlock();
   }
 
   *aOutVideoData = v;
 
@@ -315,17 +317,17 @@ WMFVideoOutputSource::CreateD3DVideoFram
   VideoData *v = VideoData::CreateFromImage(mVideoInfo,
                                             mImageContainer,
                                             aStreamOffset,
                                             pts,
                                             duration,
                                             image.forget(),
                                             false,
                                             -1,
-                                            mPictureRegion);
+                                            ToIntRect(mPictureRegion));
 
   NS_ENSURE_TRUE(v, E_FAIL);
   *aOutVideoData = v;
 
   return S_OK;
 }
 
 // Blocks until decoded sample is produced by the deoder.
--- a/content/media/gstreamer/GStreamerReader.cpp
+++ b/content/media/gstreamer/GStreamerReader.cpp
@@ -11,19 +11,21 @@
 #include "MediaResource.h"
 #include "GStreamerReader.h"
 #include "GStreamerFormatHelper.h"
 #include "GStreamerMozVideoBuffer.h"
 #include "VideoUtils.h"
 #include "mozilla/dom/TimeRanges.h"
 #include "mozilla/Preferences.h"
 #include "GStreamerLoader.h"
+#include "gfx2DGlue.h"
 
 namespace mozilla {
 
+using namespace gfx;
 using namespace layers;
 
 // Un-comment to enable logging of seek bisections.
 //#define SEEK_LOGGING
 
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* gMediaDecoderLog;
 #define LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
@@ -1007,17 +1009,17 @@ void GStreamerReader::VideoPreroll()
   /* The first video buffer has reached the video sink. Get width and height */
   LOG(PR_LOG_DEBUG, ("Video preroll"));
   GstPad* sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink");
   GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
   gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height);
   GstStructure* structure = gst_caps_get_structure(caps, 0);
   gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen);
   NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");
-  mInfo.mVideo.mDisplay = nsIntSize(mPicture.width, mPicture.height);
+  mInfo.mVideo.mDisplay = ThebesIntSize(mPicture.Size());
   mInfo.mVideo.mHasVideo = true;
   gst_caps_unref(caps);
   gst_object_unref(sinkpad);
 }
 
 GstFlowReturn GStreamerReader::NewBufferCb(GstAppSink* aSink,
                                            gpointer aUserData)
 {
--- a/content/media/gstreamer/GStreamerReader.h
+++ b/content/media/gstreamer/GStreamerReader.h
@@ -33,16 +33,18 @@ class TimeRanges;
 namespace layers {
 class PlanarYCbCrImage;
 }
 
 class AbstractMediaDecoder;
 
 class GStreamerReader : public MediaDecoderReader
 {
+  typedef gfx::IntRect IntRect;
+
 public:
   GStreamerReader(AbstractMediaDecoder* aDecoder);
   virtual ~GStreamerReader();
 
   virtual nsresult Init(MediaDecoderReader* aCloneDonor);
   virtual nsresult ResetDecode();
   virtual bool DecodeAudioData();
   virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
@@ -174,17 +176,17 @@ private:
   GstElement* mVideoSink;
   /* the actual video app sink */
   GstAppSink* mVideoAppSink;
   /* audio sink bin */
   GstElement* mAudioSink;
   /* the actual audio app sink */
   GstAppSink* mAudioAppSink;
   GstVideoFormat mFormat;
-  nsIntRect mPicture;
+  IntRect mPicture;
   int mVideoSinkBufferCount;
   int mAudioSinkBufferCount;
   GstAppSrcCallbacks mSrcCallbacks;
   GstAppSinkCallbacks mSinkCallbacks;
   /* monitor used to synchronize access to shared state between gstreamer
    * threads and other gecko threads */
   ReentrantMonitor mGstThreadsMonitor;
   /* video and audio segments we use to convert absolute timestamps to [0,
--- a/content/media/ogg/OggReader.cpp
+++ b/content/media/ogg/OggReader.cpp
@@ -19,16 +19,19 @@ extern "C" {
 #include "opus/opus_multistream.h"
 }
 #endif
 #include "mozilla/dom/TimeRanges.h"
 #include "mozilla/TimeStamp.h"
 #include "VorbisUtils.h"
 #include "MediaMetadataManager.h"
 #include "nsISeekableStream.h"
+#include "gfx2DGlue.h"
+
+using namespace mozilla::gfx;
 
 namespace mozilla {
 
 // On B2G estimate the buffered ranges rather than calculating them explicitly.
 // This prevents us doing I/O on the main thread, which is prohibited in B2G.
 #ifdef MOZ_WIDGET_GONK
 #define OGG_ESTIMATE_BUFFERED 1
 #endif
@@ -760,17 +763,17 @@ nsresult OggReader::DecodeTheora(ogg_pac
     VideoData *v = VideoData::Create(mInfo.mVideo,
                                      mDecoder->GetImageContainer(),
                                      mDecoder->GetResource()->Tell(),
                                      time,
                                      endTime - time,
                                      b,
                                      isKeyframe,
                                      aPacket->granulepos,
-                                     mPicture);
+                                     ToIntRect(mPicture));
     if (!v) {
       // There may be other reasons for this error, but for
       // simplicity just assume the worst case: out of memory.
       NS_WARNING("Failed to allocate memory for video frame");
       return NS_ERROR_OUT_OF_MEMORY;
     }
     mVideoQueue.Push(v);
   }
--- a/content/media/omx/MediaOmxReader.cpp
+++ b/content/media/omx/MediaOmxReader.cpp
@@ -10,21 +10,23 @@
 #include "mozilla/TimeStamp.h"
 #include "mozilla/dom/TimeRanges.h"
 #include "MediaResource.h"
 #include "VideoUtils.h"
 #include "MediaOmxDecoder.h"
 #include "AbstractMediaDecoder.h"
 #include "OmxDecoder.h"
 #include "MPAPI.h"
+#include "gfx2DGlue.h"
 
 #define MAX_DROPPED_FRAMES 25
 // Try not to spend more than this much time in a single call to DecodeVideoFrame.
 #define MAX_VIDEO_DECODE_SECONDS 3.0
 
+using namespace mozilla::gfx;
 using namespace android;
 
 namespace mozilla {
 
 MediaOmxReader::MediaOmxReader(AbstractMediaDecoder *aDecoder) :
   MediaDecoderReader(aDecoder),
   mHasVideo(false),
   mHasAudio(false),
@@ -212,17 +214,17 @@ bool MediaOmxReader::DecodeVideoFrame(bo
       continue;
     }
 
     mSkipCount = 0;
 
     mVideoSeekTimeUs = -1;
     aKeyframeSkip = false;
 
-    nsIntRect picture = mPicture;
+    IntRect picture = ToIntRect(mPicture);
     if (frame.Y.mWidth != mInitialFrame.width ||
         frame.Y.mHeight != mInitialFrame.height) {
 
       // Frame size is different from what the container reports. This is legal,
       // and we will preserve the ratio of the crop rectangle as it
       // was reported relative to the picture size reported by the container.
       picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
       picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
--- a/content/media/plugins/MediaPluginReader.cpp
+++ b/content/media/plugins/MediaPluginReader.cpp
@@ -9,16 +9,17 @@
 #include "mozilla/gfx/Point.h"
 #include "MediaResource.h"
 #include "VideoUtils.h"
 #include "MediaPluginDecoder.h"
 #include "MediaPluginHost.h"
 #include "MediaDecoderStateMachine.h"
 #include "ImageContainer.h"
 #include "AbstractMediaDecoder.h"
+#include "gfx2DGlue.h"
 
 namespace mozilla {
 
 using namespace mozilla::gfx;
 
 typedef mozilla::layers::Image Image;
 typedef mozilla::layers::PlanarYCbCrImage PlanarYCbCrImage;
 
@@ -167,17 +168,17 @@ bool MediaPluginReader::DecodeVideoFrame
       aKeyframeSkip = false;
     }
 
     if (frame.mSize == 0)
       return true;
 
     currentImage = bufferCallback.GetImage();
     int64_t pos = mDecoder->GetResource()->Tell();
-    nsIntRect picture = mPicture;
+    IntRect picture = ToIntRect(mPicture);
 
     nsAutoPtr<VideoData> v;
     if (currentImage) {
       gfx::IntSize frameSize = currentImage->GetSize();
       if (frameSize.width != mInitialFrame.width ||
           frameSize.height != mInitialFrame.height) {
         // Frame size is different from what the container reports. This is legal,
         // and we will preserve the ratio of the crop rectangle as it
--- a/content/media/raw/RawReader.cpp
+++ b/content/media/raw/RawReader.cpp
@@ -4,16 +4,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaDecoderStateMachine.h"
 #include "AbstractMediaDecoder.h"
 #include "RawReader.h"
 #include "RawDecoder.h"
 #include "VideoUtils.h"
 #include "nsISeekableStream.h"
+#include "gfx2DGlue.h"
 
 using namespace mozilla;
 
 RawReader::RawReader(AbstractMediaDecoder* aDecoder)
   : MediaDecoderReader(aDecoder),
     mCurrentFrame(0), mFrameSize(0)
 {
   MOZ_COUNT_CTOR(RawReader);
@@ -210,17 +211,17 @@ bool RawReader::DecodeVideoFrame(bool &a
   VideoData *v = VideoData::Create(mInfo.mVideo,
                                    mDecoder->GetImageContainer(),
                                    -1,
                                    currentFrameTime,
                                    (USECS_PER_S / mFrameRate),
                                    b,
                                    1, // In raw video every frame is a keyframe
                                    -1,
-                                   mPicture);
+                                   ToIntRect(mPicture));
   if (!v)
     return false;
 
   mVideoQueue.Push(v);
   mCurrentFrame++;
   decoded++;
   currentFrameTime += USECS_PER_S / mFrameRate;
 
--- a/content/media/webm/WebMReader.cpp
+++ b/content/media/webm/WebMReader.cpp
@@ -6,16 +6,18 @@
 #include "nsError.h"
 #include "MediaDecoderStateMachine.h"
 #include "AbstractMediaDecoder.h"
 #include "MediaResource.h"
 #include "WebMReader.h"
 #include "WebMBufferedParser.h"
 #include "mozilla/dom/TimeRanges.h"
 #include "VorbisUtils.h"
+#include "gfx2DGlue.h"
+
 #include <algorithm>
 
 #define VPX_DONT_DEFINE_STDINT_TYPES
 #include "vpx/vp8dx.h"
 #include "vpx/vpx_decoder.h"
 
 #include "OggReader.h"
 
@@ -25,16 +27,17 @@ template <>
 class nsAutoRefTraits<NesteggPacketHolder> : public nsPointerRefTraits<NesteggPacketHolder>
 {
 public:
   static void Release(NesteggPacketHolder* aHolder) { delete aHolder; }
 };
 
 namespace mozilla {
 
+using namespace gfx;
 using namespace layers;
 
 // Un-comment to enable logging of seek bisections.
 //#define SEEK_LOGGING
 
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* gMediaDecoderLog;
 PRLogModuleInfo* gNesteggLog;
@@ -940,17 +943,17 @@ bool WebMReader::DecodeVideoFrame(bool &
       b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;
  
       b.mPlanes[2].mData = img->planes[2];
       b.mPlanes[2].mStride = img->stride[2];
       b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
       b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
       b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;
   
-      nsIntRect picture = mPicture;
+      IntRect picture = ToIntRect(mPicture);
       if (img->d_w != static_cast<uint32_t>(mInitialFrame.width) ||
           img->d_h != static_cast<uint32_t>(mInitialFrame.height)) {
         // Frame size is different from what the container reports. This is legal
         // in WebM, and we will preserve the ratio of the crop rectangle as it
         // was reported relative to the picture size reported by the container.
         picture.x = (mPicture.x * img->d_w) / mInitialFrame.width;
         picture.y = (mPicture.y * img->d_h) / mInitialFrame.height;
         picture.width = (img->d_w * mPicture.width) / mInitialFrame.width;
--- a/content/media/wmf/WMFReader.cpp
+++ b/content/media/wmf/WMFReader.cpp
@@ -19,17 +19,19 @@
 #include "mozilla/layers/LayersTypes.h"
 
 #ifndef MOZ_SAMPLE_TYPE_FLOAT32
 #error We expect 32bit float audio samples on desktop for the Windows Media Foundation media backend.
 #endif
 
 #include "MediaDecoder.h"
 #include "VideoUtils.h"
+#include "gfx2DGlue.h"
 
+using namespace mozilla::gfx;
 using mozilla::layers::Image;
 using mozilla::layers::LayerManager;
 using mozilla::layers::LayersBackend;
 
 namespace mozilla {
 
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* gMediaDecoderLog;
@@ -725,17 +727,17 @@ WMFReader::CreateBasicVideoFrame(IMFSamp
   VideoData *v = VideoData::Create(mInfo.mVideo,
                                    mDecoder->GetImageContainer(),
                                    aOffsetBytes,
                                    aTimestampUsecs,
                                    aDurationUsecs,
                                    b,
                                    false,
                                    -1,
-                                   mPictureRegion);
+                                   ToIntRect(mPictureRegion));
   if (twoDBuffer) {
     twoDBuffer->Unlock2D();
   } else {
     buffer->Unlock();
   }
 
   *aOutVideoData = v;
 
@@ -768,17 +770,17 @@ WMFReader::CreateD3DVideoFrame(IMFSample
   VideoData *v = VideoData::CreateFromImage(mInfo.mVideo,
                                             mDecoder->GetImageContainer(),
                                             aOffsetBytes,
                                             aTimestampUsecs,
                                             aDurationUsecs,
                                             image.forget(),
                                             false,
                                             -1,
-                                            mPictureRegion);
+                                            ToIntRect(mPictureRegion));
 
   NS_ENSURE_TRUE(v, E_FAIL);
   *aOutVideoData = v;
 
   return S_OK;
 }
 
 bool