Bug 962784 - Convert ImageTypes.h to typed enums - r=jrmuizel
author: Benoit Jacob <bjacob@mozilla.com>
Thu, 30 Jan 2014 17:58:49 -0500
changeset 182157 739b4300adae37f2e67fb70ec28550c0fce9d4c4
parent 182156 6c828f2561791d0cff225fbb3311464b11129a91
child 182158 2cf4e419eb96be265890be2fe8bd2a16d8bda032
push id: 3343
push user: ffxbld
push date: Mon, 17 Mar 2014 21:55:32 +0000
treeherder: mozilla-beta@2f7d3415f79f [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: jrmuizel
bugs: 962784
milestone: 29.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 962784 - Convert ImageTypes.h to typed enums - r=jrmuizel find content/media media/webrtc gfx/layers gfx/tests dom/camera dom/plugins image/src layout/base -type f | grep -v 'gfx/layers/ImageTypes.h' | xargs sed -i 's/\(^\|[^A-Za-z0-9_]\)\(GRALLOC_PLANAR_YCBCR\|PLANAR_YCBCR\|SHARED_RGB\|CAIRO_SURFACE\|MAC_IOSURFACE\|REMOTE_IMAGE_BITMAP\|SHARED_TEXTURE\|REMOTE_IMAGE_DXGI_TEXTURE\|D3D9_RGB32_TEXTURE\)\($\|[^A-Za-z0-9_]\)/\1ImageFormat::\2\3/g' find content/media media/webrtc gfx/layers gfx/tests dom/camera dom/plugins image/src layout/base -type f | grep -v 'gfx/layers/ImageTypes.h' | xargs sed -i 's|ImageFormat\:\:ImageFormat|ImageFormat|g' find content/media media/webrtc gfx/layers gfx/tests dom/camera dom/plugins image/src layout/base -type f | xargs sed -i 's/\(^\|[^A-Za-z0-9_]\)STEREO_MODE_\(MONO\|LEFT_RIGHT\|RIGHT_LEFT\|BOTTOM_TOP\|TOP_BOTTOM\)\($\|[^A-Za-z0-9_]\)/\1StereoMode::\2\3/g' find content/media media/webrtc gfx/layers gfx/tests dom/camera dom/plugins image/src layout/base -type f | grep -v 'gfx/layers/ImageTypes.h' | xargs sed -i 's|StereoMode\:\:StereoMode|StereoMode|g'
content/media/MediaDecoderReader.cpp
content/media/MediaDecoderReader.h
content/media/MediaStreamGraph.cpp
content/media/encoder/VP8TrackEncoder.cpp
content/media/gstreamer/GStreamerReader.cpp
content/media/omx/OMXCodecWrapper.cpp
content/media/plugins/MediaPluginReader.cpp
content/media/webm/WebMReader.cpp
content/media/webrtc/MediaEngineDefault.cpp
content/media/webrtc/MediaEngineWebRTCVideo.cpp
content/media/wmf/DXVA2Manager.cpp
dom/plugins/ipc/PluginInstanceParent.cpp
gfx/layers/D3D9SurfaceImage.h
gfx/layers/GrallocImages.cpp
gfx/layers/ImageContainer.cpp
gfx/layers/ImageContainer.h
gfx/layers/ImageTypes.h
gfx/layers/MacIOSurfaceImage.h
gfx/layers/SharedTextureImage.h
gfx/layers/basic/BasicImages.cpp
gfx/layers/client/ImageClient.cpp
gfx/layers/client/ImageClient.h
gfx/layers/d3d10/ImageLayerD3D10.cpp
gfx/layers/d3d10/ImageLayerD3D10.h
gfx/layers/d3d9/CompositorD3D9.cpp
gfx/layers/d3d9/ImageLayerD3D9.cpp
gfx/layers/d3d9/TextureD3D9.cpp
gfx/layers/d3d9/TextureD3D9.h
gfx/layers/ipc/SharedRGBImage.cpp
gfx/tests/gtest/TestTextures.cpp
image/src/RasterImage.cpp
layout/base/FrameLayerBuilder.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
media/webrtc/signaling/test/FakeMediaStreamsImpl.h
--- a/content/media/MediaDecoderReader.cpp
+++ b/content/media/MediaDecoderReader.cpp
@@ -216,33 +216,33 @@ VideoData* VideoData::Create(VideoInfo& 
                                        aKeyframe,
                                        aTimecode,
                                        aInfo.mDisplay));
   const YCbCrBuffer::Plane &Y = aBuffer.mPlanes[0];
   const YCbCrBuffer::Plane &Cb = aBuffer.mPlanes[1];
   const YCbCrBuffer::Plane &Cr = aBuffer.mPlanes[2];
 
   if (!aImage) {
-    // Currently our decoder only knows how to output to PLANAR_YCBCR
+    // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
     // format.
-    ImageFormat format[2] = {PLANAR_YCBCR, GRALLOC_PLANAR_YCBCR};
+    ImageFormat format[2] = {ImageFormat::PLANAR_YCBCR, ImageFormat::GRALLOC_PLANAR_YCBCR};
     if (IsYV12Format(Y, Cb, Cr)) {
       v->mImage = aContainer->CreateImage(format, 2);
     } else {
       v->mImage = aContainer->CreateImage(format, 1);
     }
   } else {
     v->mImage = aImage;
   }
 
   if (!v->mImage) {
     return nullptr;
   }
-  NS_ASSERTION(v->mImage->GetFormat() == PLANAR_YCBCR ||
-               v->mImage->GetFormat() == GRALLOC_PLANAR_YCBCR,
+  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR ||
+               v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
                "Wrong format?");
   PlanarYCbCrImage* videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());
 
   PlanarYCbCrData data;
   data.mYChannel = Y.mData + Y.mOffset;
   data.mYSize = IntSize(Y.mWidth, Y.mHeight);
   data.mYStride = Y.mStride;
   data.mYSkip = Y.mSkip;
@@ -358,22 +358,22 @@ VideoData* VideoData::Create(VideoInfo& 
 
   nsAutoPtr<VideoData> v(new VideoData(aOffset,
                                        aTime,
                                        aDuration,
                                        aKeyframe,
                                        aTimecode,
                                        aInfo.mDisplay));
 
-  ImageFormat format = GRALLOC_PLANAR_YCBCR;
+  ImageFormat format = ImageFormat::GRALLOC_PLANAR_YCBCR;
   v->mImage = aContainer->CreateImage(&format, 1);
   if (!v->mImage) {
     return nullptr;
   }
-  NS_ASSERTION(v->mImage->GetFormat() == GRALLOC_PLANAR_YCBCR,
+  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
                "Wrong format?");
   typedef mozilla::layers::GrallocImage GrallocImage;
   GrallocImage* videoImage = static_cast<GrallocImage*>(v->mImage.get());
   GrallocImage::GrallocData data;
 
   data.mPicSize = aPicture.Size().ToIntSize();
   data.mGraphicBuffer = aBuffer;
 
@@ -384,17 +384,17 @@ VideoData* VideoData::Create(VideoInfo& 
 #endif  // MOZ_OMX_DECODER
 
 void* MediaDecoderReader::VideoQueueMemoryFunctor::operator()(void* anObject) {
   const VideoData* v = static_cast<const VideoData*>(anObject);
   if (!v->mImage) {
     return nullptr;
   }
 
-  if (v->mImage->GetFormat() == PLANAR_YCBCR) {
+  if (v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
     mozilla::layers::PlanarYCbCrImage* vi = static_cast<mozilla::layers::PlanarYCbCrImage*>(v->mImage.get());
     mResult += vi->GetDataSize();
   }
   return nullptr;
 }
 
 MediaDecoderReader::MediaDecoderReader(AbstractMediaDecoder* aDecoder)
   : mDecoder(aDecoder),
--- a/content/media/MediaDecoderReader.h
+++ b/content/media/MediaDecoderReader.h
@@ -28,17 +28,17 @@ namespace dom {
 class TimeRanges;
 }
 
 // Stores info relevant to presenting media frames.
 class VideoInfo {
 public:
   VideoInfo()
     : mDisplay(0,0),
-      mStereoMode(STEREO_MODE_MONO),
+      mStereoMode(StereoMode::MONO),
       mHasVideo(false)
   {}
 
   // Returns true if it's safe to use aPicture as the picture to be
   // extracted inside a frame of size aFrame, and scaled up to and displayed
   // at a size of aDisplay. You should validate the frame, picture, and
   // display regions before using them to display video frames.
   static bool ValidateVideoRegion(const nsIntSize& aFrame,
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -950,17 +950,17 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
   GraphTime startTime = StreamTimeToGraphTime(aStream,
       track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
   TimeStamp targetTime = mCurrentTimeStamp +
       TimeDuration::FromMilliseconds(double(startTime - mCurrentTime));
   for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
     VideoFrameContainer* output = aStream->mVideoOutputs[i];
 
     if (frame->GetForceBlack()) {
-      static const ImageFormat formats[1] = { PLANAR_YCBCR };
+      static const ImageFormat formats[1] = { ImageFormat::PLANAR_YCBCR };
       nsRefPtr<Image> image =
         output->GetImageContainer()->CreateImage(formats, 1);
       if (image) {
         // Sets the image to a single black pixel, which will be scaled to fill
         // the rendered size.
         SetImageToBlackPixel(static_cast<PlanarYCbCrImage*>(image.get()));
       }
       output->SetCurrentFrame(frame->GetIntrinsicSize(), image,
--- a/content/media/encoder/VP8TrackEncoder.cpp
+++ b/content/media/encoder/VP8TrackEncoder.cpp
@@ -243,17 +243,17 @@ nsresult VP8TrackEncoder::PrepareRawFram
   if (aChunk.mFrame.GetForceBlack()) {
     PrepareMutedFrame();
   } else {
     layers::Image* img = aChunk.mFrame.GetImage();
     if (NS_WARN_IF(!img)) {
       return NS_ERROR_NULL_POINTER;
     }
     ImageFormat format = img->GetFormat();
-    if (format != PLANAR_YCBCR) {
+    if (format != ImageFormat::PLANAR_YCBCR) {
       VP8LOG("Unsupported video format\n");
       return NS_ERROR_FAILURE;
     }
 
     // Cast away constness b/c some of the accessors are non-const
     layers::PlanarYCbCrImage* yuv =
     const_cast<layers::PlanarYCbCrImage *>(static_cast<const layers::PlanarYCbCrImage *>(img));
     // Big-time assumption here that this is all contiguous data coming
--- a/content/media/gstreamer/GStreamerReader.cpp
+++ b/content/media/gstreamer/GStreamerReader.cpp
@@ -916,17 +916,17 @@ GstFlowReturn GStreamerReader::AllocateV
                                                        guint64 aOffset,
                                                        guint aSize,
                                                        GstCaps* aCaps,
                                                        GstBuffer** aBuf,
                                                        nsRefPtr<PlanarYCbCrImage>& aImage)
 {
   /* allocate an image using the container */
   ImageContainer* container = mDecoder->GetImageContainer();
-  ImageFormat format = PLANAR_YCBCR;
+  ImageFormat format = ImageFormat::PLANAR_YCBCR;
   PlanarYCbCrImage* img = reinterpret_cast<PlanarYCbCrImage*>(container->CreateImage(&format, 1).get());
   nsRefPtr<PlanarYCbCrImage> image = dont_AddRef(img);
 
   /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */
   GstBuffer* buf = GST_BUFFER(gst_moz_video_buffer_new());
   GST_BUFFER_SIZE(buf) = aSize;
   /* allocate the actual YUV buffer */
   GST_BUFFER_DATA(buf) = image->AllocateAndGetNewBuffer(aSize);
--- a/content/media/omx/OMXCodecWrapper.cpp
+++ b/content/media/omx/OMXCodecWrapper.cpp
@@ -244,17 +244,17 @@ OMXVideoEncoder::Encode(const Image* aIm
     memset(dst + yLen, 0x80, uvLen);
   } else {
     Image* img = const_cast<Image*>(aImage);
     ImageFormat format = img->GetFormat();
 
     MOZ_ASSERT(aWidth == img->GetSize().width &&
                aHeight == img->GetSize().height);
 
-    if (format == GRALLOC_PLANAR_YCBCR) {
+    if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
       // Get graphic buffer pointer.
       void* imgPtr = nullptr;
       GrallocImage* nativeImage = static_cast<GrallocImage*>(img);
       SurfaceDescriptor handle = nativeImage->GetSurfaceDescriptor();
       SurfaceDescriptorGralloc gralloc = handle.get_SurfaceDescriptorGralloc();
       sp<GraphicBuffer> graphicBuffer = GrallocBufferActor::GetFrom(gralloc);
       graphicBuffer->lock(GraphicBuffer::USAGE_SW_READ_MASK, &imgPtr);
       uint8_t* src = static_cast<uint8_t*>(imgPtr);
@@ -279,17 +279,17 @@ OMXVideoEncoder::Encode(const Image* aIm
       nv21.mCbCrStride = aWidth;
       // 4:2:0.
       nv21.mCbCrSize.width = aWidth / 2;
       nv21.mCbCrSize.height = aHeight / 2;
 
       ConvertPlanarYCbCrToNV12(&nv21, dst);
 
       graphicBuffer->unlock();
-    } else if (format == PLANAR_YCBCR) {
+    } else if (format == ImageFormat::PLANAR_YCBCR) {
       ConvertPlanarYCbCrToNV12(static_cast<PlanarYCbCrImage*>(img)->GetData(),
                              dst);
     } else {
       // TODO: support RGB to YUV color conversion.
       NS_ERROR("Unsupported input image type.");
     }
   }
 
--- a/content/media/plugins/MediaPluginReader.cpp
+++ b/content/media/plugins/MediaPluginReader.cpp
@@ -368,17 +368,17 @@ MediaPluginReader::ImageBufferCallback::
       return nullptr;
   }
 }
 
 uint8_t *
 MediaPluginReader::ImageBufferCallback::CreateI420Image(size_t aWidth,
                                                         size_t aHeight)
 {
-  ImageFormat format = PLANAR_YCBCR;
+  ImageFormat format = ImageFormat::PLANAR_YCBCR;
 
   mImage = mImageContainer->CreateImage(&format, 1 /* numFormats */);
   PlanarYCbCrImage *yuvImage = static_cast<PlanarYCbCrImage *>(mImage.get());
 
   if (!yuvImage) {
     NS_WARNING("Could not create I420 image");
     return nullptr;
   }
--- a/content/media/webm/WebMReader.cpp
+++ b/content/media/webm/WebMReader.cpp
@@ -339,29 +339,29 @@ nsresult WebMReader::ReadMetadata(MediaI
       mInfo.mVideo.mHasVideo = true;
 
       mInfo.mVideo.mDisplay = displaySize;
       mPicture = pictureRect;
       mInitialFrame = frameSize;
 
       switch (params.stereo_mode) {
       case NESTEGG_VIDEO_MONO:
-        mInfo.mVideo.mStereoMode = STEREO_MODE_MONO;
+        mInfo.mVideo.mStereoMode = StereoMode::MONO;
         break;
       case NESTEGG_VIDEO_STEREO_LEFT_RIGHT:
-        mInfo.mVideo.mStereoMode = STEREO_MODE_LEFT_RIGHT;
+        mInfo.mVideo.mStereoMode = StereoMode::LEFT_RIGHT;
         break;
       case NESTEGG_VIDEO_STEREO_BOTTOM_TOP:
-        mInfo.mVideo.mStereoMode = STEREO_MODE_BOTTOM_TOP;
+        mInfo.mVideo.mStereoMode = StereoMode::BOTTOM_TOP;
         break;
       case NESTEGG_VIDEO_STEREO_TOP_BOTTOM:
-        mInfo.mVideo.mStereoMode = STEREO_MODE_TOP_BOTTOM;
+        mInfo.mVideo.mStereoMode = StereoMode::TOP_BOTTOM;
         break;
       case NESTEGG_VIDEO_STEREO_RIGHT_LEFT:
-        mInfo.mVideo.mStereoMode = STEREO_MODE_RIGHT_LEFT;
+        mInfo.mVideo.mStereoMode = StereoMode::RIGHT_LEFT;
         break;
       }
     }
     else if (!mHasAudio && type == NESTEGG_TRACK_AUDIO) {
       nestegg_audio_params params;
       r = nestegg_track_audio_params(mContext, track, &params);
       if (r == -1) {
         Cleanup();
--- a/content/media/webrtc/MediaEngineDefault.cpp
+++ b/content/media/webrtc/MediaEngineDefault.cpp
@@ -98,17 +98,17 @@ static void AllocateSolidColorFrame(laye
   aData.mYStride = aWidth;
   aData.mCbCrStride = aWidth>>1;
   aData.mCbChannel = frame + yLen;
   aData.mCrChannel = aData.mCbChannel + cbLen;
   aData.mCbCrSize = IntSize(aWidth>>1, aHeight>>1);
   aData.mPicX = 0;
   aData.mPicY = 0;
   aData.mPicSize = IntSize(aWidth, aHeight);
-  aData.mStereoMode = STEREO_MODE_MONO;
+  aData.mStereoMode = StereoMode::MONO;
 }
 
 static void ReleaseFrame(layers::PlanarYCbCrData& aData)
 {
   PR_Free(aData.mYChannel);
 }
 
 nsresult
@@ -202,17 +202,17 @@ MediaEngineDefaultVideoSource::Notify(ns
     } else {
       mCr--;
     }
   } else {
     mCr--;
   }
 
   // Allocate a single solid color image
-  ImageFormat format = PLANAR_YCBCR;
+  ImageFormat format = ImageFormat::PLANAR_YCBCR;
   nsRefPtr<layers::Image> image = mImageContainer->CreateImage(&format, 1);
   nsRefPtr<layers::PlanarYCbCrImage> ycbcr_image =
       static_cast<layers::PlanarYCbCrImage*>(image.get());
   layers::PlanarYCbCrData data;
   AllocateSolidColorFrame(data, mOpts.mWidth, mOpts.mHeight, 0x80, mCb, mCr);
   ycbcr_image->SetData(data);
   // SetData copies data, so we can free the frame
   ReleaseFrame(data);
--- a/content/media/webrtc/MediaEngineWebRTCVideo.cpp
+++ b/content/media/webrtc/MediaEngineWebRTCVideo.cpp
@@ -62,17 +62,17 @@ MediaEngineWebRTCVideoSource::DeliverFra
   }
 
   MOZ_ASSERT(mWidth*mHeight*3/2 == size);
   if (mWidth*mHeight*3/2 != size) {
     return 0;
   }
 
   // Create a video frame and append it to the track.
-  ImageFormat format = PLANAR_YCBCR;
+  ImageFormat format = ImageFormat::PLANAR_YCBCR;
 
   nsRefPtr<layers::Image> image = mImageContainer->CreateImage(&format, 1);
 
   layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
 
   uint8_t* frame = static_cast<uint8_t*> (buffer);
   const uint8_t lumaBpp = 8;
   const uint8_t chromaBpp = 4;
@@ -83,17 +83,17 @@ MediaEngineWebRTCVideoSource::DeliverFra
   data.mYStride = mWidth * lumaBpp/ 8;
   data.mCbCrStride = mWidth * chromaBpp / 8;
   data.mCbChannel = frame + mHeight * data.mYStride;
   data.mCrChannel = data.mCbChannel + mHeight * data.mCbCrStride / 2;
   data.mCbCrSize = IntSize(mWidth/ 2, mHeight/ 2);
   data.mPicX = 0;
   data.mPicY = 0;
   data.mPicSize = IntSize(mWidth, mHeight);
-  data.mStereoMode = STEREO_MODE_MONO;
+  data.mStereoMode = StereoMode::MONO;
 
   videoImage->SetData(data);
 
 #ifdef DEBUG
   static uint32_t frame_num = 0;
   LOGFRAME(("frame %d (%dx%d); timestamp %u, render_time %lu", frame_num++,
             mWidth, mHeight, time_stamp, render_time));
 #endif
--- a/content/media/wmf/DXVA2Manager.cpp
+++ b/content/media/wmf/DXVA2Manager.cpp
@@ -149,20 +149,20 @@ D3D9DXVA2Manager::CopyToImage(IMFSample*
 
   nsRefPtr<IDirect3DSurface9> surface;
   hr = wmf::MFGetService(buffer,
                          MR_BUFFER_SERVICE,
                          IID_IDirect3DSurface9,
                          getter_AddRefs(surface));
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  ImageFormat format = D3D9_RGB32_TEXTURE;
+  ImageFormat format = ImageFormat::D3D9_RGB32_TEXTURE;
   nsRefPtr<Image> image = aImageContainer->CreateImage(&format, 1);
   NS_ENSURE_TRUE(image, E_FAIL);
-  NS_ASSERTION(image->GetFormat() == D3D9_RGB32_TEXTURE,
+  NS_ASSERTION(image->GetFormat() == ImageFormat::D3D9_RGB32_TEXTURE,
                "Wrong format?");
 
   D3D9SurfaceImage* videoImage = static_cast<D3D9SurfaceImage*>(image.get());
   hr = videoImage->SetData(D3D9SurfaceImage::Data(surface, aRegion));
 
   image.forget(aOutImage);
 
   return S_OK;
--- a/dom/plugins/ipc/PluginInstanceParent.cpp
+++ b/dom/plugins/ipc/PluginInstanceParent.cpp
@@ -633,19 +633,19 @@ PluginInstanceParent::RecvShow(const NPR
         // Notify the cairo backend that this surface has changed behind
         // its back.
         gfxRect ur(updatedRect.left, updatedRect.top,
                    updatedRect.right - updatedRect.left,
                    updatedRect.bottom - updatedRect.top);
         surface->MarkDirty(ur);
 
         ImageContainer *container = GetImageContainer();
-        ImageFormat format = CAIRO_SURFACE;
+        ImageFormat format = ImageFormat::CAIRO_SURFACE;
         nsRefPtr<Image> image = container->CreateImage(&format, 1);
-        NS_ASSERTION(image->GetFormat() == CAIRO_SURFACE, "Wrong format?");
+        NS_ASSERTION(image->GetFormat() == ImageFormat::CAIRO_SURFACE, "Wrong format?");
         CairoImage* cairoImage = static_cast<CairoImage*>(image.get());
         CairoImage::Data cairoData;
         cairoData.mDeprecatedSurface = surface;
         cairoData.mSize = surface->GetSize().ToIntSize();
         cairoData.mSourceSurface = gfxPlatform::GetPlatform()->GetSourceSurfaceForSurface(nullptr, surface);
         cairoImage->SetData(cairoData);
 
         container->SetCurrentImage(cairoImage);
@@ -714,23 +714,23 @@ PluginInstanceParent::GetImageContainer(
     if (IsAsyncDrawing()) {
       NS_IF_ADDREF(container);
       *aContainer = container;
       return NS_OK;
     }
 
 #ifdef XP_MACOSX
     if (ioSurface) {
-        ImageFormat format = MAC_IOSURFACE;
+        ImageFormat format = ImageFormat::MAC_IOSURFACE;
         nsRefPtr<Image> image = container->CreateImage(&format, 1);
         if (!image) {
             return NS_ERROR_FAILURE;
         }
 
-        NS_ASSERTION(image->GetFormat() == MAC_IOSURFACE, "Wrong format?");
+        NS_ASSERTION(image->GetFormat() == ImageFormat::MAC_IOSURFACE, "Wrong format?");
 
         MacIOSurfaceImage* pluginImage = static_cast<MacIOSurfaceImage*>(image.get());
         pluginImage->SetSurface(ioSurface);
 
         container->SetCurrentImageInTransaction(pluginImage);
 
         NS_IF_ADDREF(container);
         *aContainer = container;
--- a/gfx/layers/D3D9SurfaceImage.h
+++ b/gfx/layers/D3D9SurfaceImage.h
@@ -23,17 +23,17 @@ public:
 
   struct Data {
     Data(IDirect3DSurface9* aSurface, const nsIntRect& aRegion)
       : mSurface(aSurface), mRegion(aRegion) {}
     RefPtr<IDirect3DSurface9> mSurface;
     nsIntRect mRegion;
   };
 
-  D3D9SurfaceImage() : Image(nullptr, D3D9_RGB32_TEXTURE), mSize(0, 0) {}
+  D3D9SurfaceImage() : Image(nullptr, ImageFormat::D3D9_RGB32_TEXTURE), mSize(0, 0) {}
   virtual ~D3D9SurfaceImage() {}
 
   // Copies the surface into a sharable texture's surface, and initializes
   // the image.
   HRESULT SetData(const Data& aData);
 
   // Returns the description of the shared surface.
   const D3DSURFACE_DESC& GetDesc() const;
--- a/gfx/layers/GrallocImages.cpp
+++ b/gfx/layers/GrallocImages.cpp
@@ -46,17 +46,17 @@ struct GraphicBufferAutoUnlock {
 };
 
 GrallocImage::GrallocImage()
   : PlanarYCbCrImage(nullptr),
     mBufferAllocated(false),
     mGraphicBufferLocked(nullptr),
     mTextureClient(nullptr)
 {
-  mFormat = GRALLOC_PLANAR_YCBCR;
+  mFormat = ImageFormat::GRALLOC_PLANAR_YCBCR;
 }
 
 GrallocImage::~GrallocImage()
 {
   // If we have a texture client, the latter takes over the responsibility to
   // unlock the GraphicBufferLocked.
   if (mGraphicBufferLocked.get() && !mTextureClient) {
     // mBufferAllocated is set when gralloc buffer is allocated
--- a/gfx/layers/ImageContainer.cpp
+++ b/gfx/layers/ImageContainer.cpp
@@ -63,41 +63,41 @@ ImageFactory::CreateImage(const ImageFor
                           const gfx::IntSize &,
                           BufferRecycleBin *aRecycleBin)
 {
   if (!aNumFormats) {
     return nullptr;
   }
   nsRefPtr<Image> img;
 #ifdef MOZ_WIDGET_GONK
-  if (FormatInList(aFormats, aNumFormats, GRALLOC_PLANAR_YCBCR)) {
+  if (FormatInList(aFormats, aNumFormats, ImageFormat::GRALLOC_PLANAR_YCBCR)) {
     img = new GrallocImage();
     return img.forget();
   }
 #endif
-  if (FormatInList(aFormats, aNumFormats, PLANAR_YCBCR)) {
+  if (FormatInList(aFormats, aNumFormats, ImageFormat::PLANAR_YCBCR)) {
     img = new PlanarYCbCrImage(aRecycleBin);
     return img.forget();
   }
-  if (FormatInList(aFormats, aNumFormats, CAIRO_SURFACE)) {
+  if (FormatInList(aFormats, aNumFormats, ImageFormat::CAIRO_SURFACE)) {
     img = new CairoImage();
     return img.forget();
   }
-  if (FormatInList(aFormats, aNumFormats, SHARED_TEXTURE)) {
+  if (FormatInList(aFormats, aNumFormats, ImageFormat::SHARED_TEXTURE)) {
     img = new SharedTextureImage();
     return img.forget();
   }
 #ifdef XP_MACOSX
-  if (FormatInList(aFormats, aNumFormats, MAC_IOSURFACE)) {
+  if (FormatInList(aFormats, aNumFormats, ImageFormat::MAC_IOSURFACE)) {
     img = new MacIOSurfaceImage();
     return img.forget();
   }
 #endif
 #ifdef XP_WIN
-  if (FormatInList(aFormats, aNumFormats, D3D9_RGB32_TEXTURE)) {
+  if (FormatInList(aFormats, aNumFormats, ImageFormat::D3D9_RGB32_TEXTURE)) {
     img = new D3D9SurfaceImage();
     return img.forget();
   }
 #endif
   return nullptr;
 }
 
 BufferRecycleBin::BufferRecycleBin()
@@ -163,18 +163,17 @@ ImageContainer::~ImageContainer()
 
 already_AddRefed<Image>
 ImageContainer::CreateImage(const ImageFormat *aFormats,
                             uint32_t aNumFormats)
 {
   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
 
   if (mImageClient) {
-    nsRefPtr<Image> img = mImageClient->CreateImage((uint32_t*)aFormats,
-                                                            aNumFormats);
+    nsRefPtr<Image> img = mImageClient->CreateImage(aFormats, aNumFormats);
     if (img) {
       return img.forget();
     }
   }
   return mImageFactory->CreateImage(aFormats, aNumFormats, mScaleHint, mRecycleBin);
 }
 
 void
@@ -311,17 +310,17 @@ ImageContainer::DeprecatedLockCurrentAsS
       NS_IF_ADDREF(mActiveImage);
       *aCurrentImage = mActiveImage.get();
     }
 
     if (!mActiveImage) {
       return nullptr;
     } 
 
-    if (mActiveImage->GetFormat() == REMOTE_IMAGE_BITMAP) {
+    if (mActiveImage->GetFormat() == ImageFormat::REMOTE_IMAGE_BITMAP) {
       nsRefPtr<gfxImageSurface> newSurf =
         new gfxImageSurface(mRemoteData->mBitmap.mData,
                             ThebesIntSize(mRemoteData->mSize),
                             mRemoteData->mBitmap.mStride,
                             mRemoteData->mFormat == RemoteImageData::BGRX32 ?
                                                    gfxImageFormat::ARGB32 :
                                                    gfxImageFormat::RGB24);
 
@@ -362,17 +361,17 @@ ImageContainer::LockCurrentAsSourceSurfa
       NS_IF_ADDREF(mActiveImage);
       *aCurrentImage = mActiveImage.get();
     }
 
     if (!mActiveImage) {
       return nullptr;
     }
 
-    if (mActiveImage->GetFormat() == REMOTE_IMAGE_BITMAP) {
+    if (mActiveImage->GetFormat() == ImageFormat::REMOTE_IMAGE_BITMAP) {
       gfxImageFormat fmt = mRemoteData->mFormat == RemoteImageData::BGRX32
                            ? gfxImageFormat::ARGB32
                            : gfxImageFormat::RGB24;
 
       RefPtr<gfx::DataSourceSurface> newSurf
         = gfx::Factory::CreateWrappingDataSourceSurface(mRemoteData->mBitmap.mData,
                                                         mRemoteData->mBitmap.mStride,
                                                         mRemoteData->mSize,
@@ -519,17 +518,17 @@ ImageContainer::EnsureActiveImage()
       mActiveImage = newImg;
     }
 #endif
   }
 }
 
 
 PlanarYCbCrImage::PlanarYCbCrImage(BufferRecycleBin *aRecycleBin)
-  : Image(nullptr, PLANAR_YCBCR)
+  : Image(nullptr, ImageFormat::PLANAR_YCBCR)
   , mBufferSize(0)
   , mOffscreenFormat(gfxImageFormat::Unknown)
   , mRecycleBin(aRecycleBin)
 {
 }
 
 PlanarYCbCrImage::~PlanarYCbCrImage()
 {
--- a/gfx/layers/ImageContainer.h
+++ b/gfx/layers/ImageContainer.h
@@ -785,17 +785,17 @@ struct PlanarYCbCrData {
                      mPicSize.width,
                      mPicSize.height);
   }
 
   PlanarYCbCrData()
     : mYChannel(nullptr), mYStride(0), mYSize(0, 0), mYSkip(0)
     , mCbChannel(nullptr), mCrChannel(nullptr)
     , mCbCrStride(0), mCbCrSize(0, 0) , mCbSkip(0), mCrSkip(0)
-    , mPicX(0), mPicY(0), mPicSize(0, 0), mStereoMode(STEREO_MODE_MONO)
+    , mPicX(0), mPicY(0), mPicSize(0, 0), mStereoMode(StereoMode::MONO)
   {}
 };
 
 /****** Image subtypes for the different formats ******/
 
 /**
  * We assume that the image data is in the REC 470M color space (see
  * Theora specification, section 4.3.1).
@@ -953,29 +953,29 @@ public:
   virtual already_AddRefed<gfxASurface> DeprecatedGetAsSurface()
   {
     nsRefPtr<gfxASurface> surface = mDeprecatedSurface.get();
     return surface.forget();
   }
 
   gfx::IntSize GetSize() { return mSize; }
 
-  CairoImage() : Image(nullptr, CAIRO_SURFACE) {}
+  CairoImage() : Image(nullptr, ImageFormat::CAIRO_SURFACE) {}
 
   nsCountedRef<nsMainThreadSurfaceRef> mDeprecatedSurface;
   gfx::IntSize mSize;
 
   // mSourceSurface wraps mDeprrecatedSurface's data, therefore it should not
   // outlive mDeprecatedSurface
   nsCountedRef<nsMainThreadSourceSurfaceRef> mSourceSurface;
 };
 
 class RemoteBitmapImage : public Image {
 public:
-  RemoteBitmapImage() : Image(nullptr, REMOTE_IMAGE_BITMAP) {}
+  RemoteBitmapImage() : Image(nullptr, ImageFormat::REMOTE_IMAGE_BITMAP) {}
 
   already_AddRefed<gfxASurface> DeprecatedGetAsSurface();
   TemporaryRef<gfx::SourceSurface> GetAsSourceSurface();
 
   gfx::IntSize GetSize() { return mSize; }
 
   unsigned char *mData;
   int mStride;
--- a/gfx/layers/ImageTypes.h
+++ b/gfx/layers/ImageTypes.h
@@ -1,19 +1,21 @@
 /* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef GFX_IMAGETYPES_H
 #define GFX_IMAGETYPES_H
 
+#include "mozilla/TypedEnum.h"
+
 namespace mozilla {
 
-enum ImageFormat {
+MOZ_BEGIN_ENUM_CLASS(ImageFormat)
   /**
    * The PLANAR_YCBCR format creates a PlanarYCbCrImage. All backends should
    * support this format, because the Ogg video decoder depends on it.
    * The maximum image width and height is 16384.
    */
   PLANAR_YCBCR,
 
   /**
@@ -65,24 +67,21 @@ enum ImageFormat {
    */
   REMOTE_IMAGE_DXGI_TEXTURE,
 
   /**
    * The D3D9_RGB32_TEXTURE format creates a D3D9SurfaceImage, and wraps a
    * IDirect3DTexture9 in RGB32 layout.
    */
   D3D9_RGB32_TEXTURE
-
-};
-
+MOZ_END_ENUM_CLASS(ImageFormat)
 
-enum StereoMode {
-  STEREO_MODE_MONO,
-  STEREO_MODE_LEFT_RIGHT,
-  STEREO_MODE_RIGHT_LEFT,
-  STEREO_MODE_BOTTOM_TOP,
-  STEREO_MODE_TOP_BOTTOM
-};
-
+MOZ_BEGIN_ENUM_CLASS(StereoMode)
+  MONO,
+  LEFT_RIGHT,
+  RIGHT_LEFT,
+  BOTTOM_TOP,
+  TOP_BOTTOM
+MOZ_END_ENUM_CLASS(StereoMode)
 
 } // namespace
 
 #endif
--- a/gfx/layers/MacIOSurfaceImage.h
+++ b/gfx/layers/MacIOSurfaceImage.h
@@ -49,17 +49,17 @@ public:
     return imgSurface.forget();
   }
 
   virtual TemporaryRef<gfx::SourceSurface> GetAsSourceSurface();
 
   virtual TextureClient* GetTextureClient() MOZ_OVERRIDE;
   virtual uint8_t* GetBuffer() MOZ_OVERRIDE { return nullptr; }
 
-  MacIOSurfaceImage() : Image(nullptr, MAC_IOSURFACE) {}
+  MacIOSurfaceImage() : Image(nullptr, ImageFormat::MAC_IOSURFACE) {}
 
 private:
   RefPtr<MacIOSurface> mSurface;
   RefPtr<TextureClient> mTextureClient;
 };
 
 } // layers
 } // mozilla
--- a/gfx/layers/SharedTextureImage.h
+++ b/gfx/layers/SharedTextureImage.h
@@ -39,17 +39,17 @@ public:
     return nullptr;
   }
 
   virtual TemporaryRef<gfx::SourceSurface> GetAsSourceSurface() MOZ_OVERRIDE
   {
     return nullptr;
   }
 
-  SharedTextureImage() : Image(nullptr, SHARED_TEXTURE) {}
+  SharedTextureImage() : Image(nullptr, ImageFormat::SHARED_TEXTURE) {}
 
 private:
   Data mData;
 };
 
 } // layers
 } // mozilla
 
--- a/gfx/layers/basic/BasicImages.cpp
+++ b/gfx/layers/basic/BasicImages.cpp
@@ -72,17 +72,17 @@ public:
                                               const gfx::IntSize &aScaleHint,
                                               BufferRecycleBin *aRecycleBin)
   {
     if (!aNumFormats) {
       return nullptr;
     }
 
     nsRefPtr<Image> image;
-    if (aFormats[0] == PLANAR_YCBCR) {
+    if (aFormats[0] == ImageFormat::PLANAR_YCBCR) {
       image = new BasicPlanarYCbCrImage(aScaleHint, gfxPlatform::GetPlatform()->GetOffscreenFormat(), aRecycleBin);
       return image.forget();
     }
 
     return ImageFactory::CreateImage(aFormats, aNumFormats, aScaleHint, aRecycleBin);
   }
 };
 
--- a/gfx/layers/client/ImageClient.cpp
+++ b/gfx/layers/client/ImageClient.cpp
@@ -151,17 +151,17 @@ ImageClientSingle::UpdateImage(ImageCont
     }
     mFrontBuffer = texture;
     if (!AddTextureClient(texture)) {
       mFrontBuffer = nullptr;
       return false;
     }
     GetForwarder()->UpdatedTexture(this, texture, nullptr);
     GetForwarder()->UseTexture(this, texture);
-  } else if (image->GetFormat() == PLANAR_YCBCR) {
+  } else if (image->GetFormat() == ImageFormat::PLANAR_YCBCR) {
     PlanarYCbCrImage* ycbcr = static_cast<PlanarYCbCrImage*>(image);
     const PlanarYCbCrData* data = ycbcr->GetData();
     if (!data) {
       return false;
     }
 
     if (mFrontBuffer && mFrontBuffer->IsImmutable()) {
       GetForwarder()->AddForceRemovingTexture(mFrontBuffer);
@@ -196,17 +196,17 @@ ImageClientSingle::UpdateImage(ImageCont
     if (status) {
       GetForwarder()->UpdatedTexture(this, mFrontBuffer, nullptr);
       GetForwarder()->UseTexture(this, mFrontBuffer);
     } else {
       MOZ_ASSERT(false);
       return false;
     }
 
-  } else if (image->GetFormat() == SHARED_TEXTURE) {
+  } else if (image->GetFormat() == ImageFormat::SHARED_TEXTURE) {
     SharedTextureImage* sharedImage = static_cast<SharedTextureImage*>(image);
     const SharedTextureImage::Data *data = sharedImage->GetData();
     gfx::IntSize size = gfx::IntSize(image->GetSize().width, image->GetSize().height);
 
     if (mFrontBuffer) {
       GetForwarder()->AddForceRemovingTexture(mFrontBuffer);
       mFrontBuffer = nullptr;
     }
@@ -363,17 +363,17 @@ DeprecatedImageClientSingle::UpdateImage
   if (!image) {
     return false;
   }
 
   if (mLastPaintedImageSerial == image->GetSerial()) {
     return true;
   }
 
-  if (image->GetFormat() == PLANAR_YCBCR &&
+  if (image->GetFormat() == ImageFormat::PLANAR_YCBCR &&
       EnsureDeprecatedTextureClient(TEXTURE_YCBCR)) {
     PlanarYCbCrImage* ycbcr = static_cast<PlanarYCbCrImage*>(image);
 
     if (ycbcr->AsDeprecatedSharedPlanarYCbCrImage()) {
       AutoLockDeprecatedTextureClient lock(mDeprecatedTextureClient);
 
       SurfaceDescriptor sd;
       if (!ycbcr->AsDeprecatedSharedPlanarYCbCrImage()->ToSurfaceDescriptor(sd)) {
@@ -388,42 +388,42 @@ DeprecatedImageClientSingle::UpdateImage
     } else {
       AutoLockYCbCrClient clientLock(mDeprecatedTextureClient);
 
       if (!clientLock.Update(ycbcr)) {
         NS_WARNING("failed to update DeprecatedTextureClient (YCbCr)");
         return false;
       }
     }
-  } else if (image->GetFormat() == SHARED_TEXTURE &&
+  } else if (image->GetFormat() == ImageFormat::SHARED_TEXTURE &&
              EnsureDeprecatedTextureClient(TEXTURE_SHARED_GL_EXTERNAL)) {
     SharedTextureImage* sharedImage = static_cast<SharedTextureImage*>(image);
     const SharedTextureImage::Data *data = sharedImage->GetData();
 
     SharedTextureDescriptor texture(data->mShareType,
                                     data->mHandle,
                                     data->mSize,
                                     data->mInverted);
     mDeprecatedTextureClient->SetDescriptor(SurfaceDescriptor(texture));
-  } else if (image->GetFormat() == SHARED_RGB &&
+  } else if (image->GetFormat() == ImageFormat::SHARED_RGB &&
              EnsureDeprecatedTextureClient(TEXTURE_SHMEM)) {
     nsIntRect rect(0, 0,
                    image->GetSize().width,
                    image->GetSize().height);
     UpdatePictureRect(rect);
 
     AutoLockDeprecatedTextureClient lock(mDeprecatedTextureClient);
 
     SurfaceDescriptor desc;
     if (!static_cast<DeprecatedSharedRGBImage*>(image)->ToSurfaceDescriptor(desc)) {
       return false;
     }
     mDeprecatedTextureClient->SetDescriptor(desc);
 #ifdef MOZ_WIDGET_GONK
-  } else if (image->GetFormat() == GRALLOC_PLANAR_YCBCR) {
+  } else if (image->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR) {
     EnsureDeprecatedTextureClient(TEXTURE_SHARED_GL_EXTERNAL);
 
     nsIntRect rect(0, 0,
                    image->GetSize().width,
                    image->GetSize().height);
     UpdatePictureRect(rect);
 
     AutoLockDeprecatedTextureClient lock(mDeprecatedTextureClient);
@@ -445,17 +445,17 @@ DeprecatedImageClientSingle::UpdateImage
     if (!clientLock.Update(image, aContentFlags, surface)) {
       NS_WARNING("failed to update DeprecatedTextureClient");
       return false;
     }
   }
 
   Updated();
 
-  if (image->GetFormat() == PLANAR_YCBCR) {
+  if (image->GetFormat() == ImageFormat::PLANAR_YCBCR) {
     PlanarYCbCrImage* ycbcr = static_cast<PlanarYCbCrImage*>(image);
     UpdatePictureRect(ycbcr->GetData()->GetPictureRect());
   }
 
   mLastPaintedImageSerial = image->GetSerial();
   aContainer->NotifyPaintedImage(image);
   return true;
 }
@@ -487,56 +487,60 @@ ImageClientBridge::UpdateImage(ImageCont
   static_cast<ShadowLayerForwarder*>(GetForwarder())->AttachAsyncCompositable(mAsyncContainerID, mLayer);
   AutoLockImage autoLock(aContainer);
   aContainer->NotifyPaintedImage(autoLock.GetImage());
   Updated();
   return true;
 }
 
 already_AddRefed<Image>
-ImageClientSingle::CreateImage(const uint32_t *aFormats,
+ImageClientSingle::CreateImage(const ImageFormat *aFormats,
                                uint32_t aNumFormats)
 {
   nsRefPtr<Image> img;
   for (uint32_t i = 0; i < aNumFormats; i++) {
     switch (aFormats[i]) {
-      case PLANAR_YCBCR:
+      case ImageFormat::PLANAR_YCBCR:
         img = new SharedPlanarYCbCrImage(this);
         return img.forget();
-      case SHARED_RGB:
+      case ImageFormat::SHARED_RGB:
         img = new SharedRGBImage(this);
         return img.forget();
 #ifdef MOZ_WIDGET_GONK
-      case GRALLOC_PLANAR_YCBCR:
+      case ImageFormat::GRALLOC_PLANAR_YCBCR:
         img = new GrallocImage();
         return img.forget();
 #endif
+      default:
+        continue; // keep iterating over aFormats
     }
   }
   return nullptr;
 }
 
 already_AddRefed<Image>
-DeprecatedImageClientSingle::CreateImage(const uint32_t *aFormats,
+DeprecatedImageClientSingle::CreateImage(const ImageFormat *aFormats,
                                          uint32_t aNumFormats)
 {
   nsRefPtr<Image> img;
   for (uint32_t i = 0; i < aNumFormats; i++) {
     switch (aFormats[i]) {
-      case PLANAR_YCBCR:
+      case ImageFormat::PLANAR_YCBCR:
         img = new DeprecatedSharedPlanarYCbCrImage(GetForwarder());
         return img.forget();
-      case SHARED_RGB:
+      case ImageFormat::SHARED_RGB:
         img = new DeprecatedSharedRGBImage(GetForwarder());
         return img.forget();
 #ifdef MOZ_WIDGET_GONK
-      case GRALLOC_PLANAR_YCBCR:
+      case ImageFormat::GRALLOC_PLANAR_YCBCR:
         img = new GrallocImage();
         return img.forget();
 #endif
+      default:
+        continue; // keep iterating over aFormats
     }
   }
   return nullptr;
 }
 
 
 }
 }
--- a/gfx/layers/client/ImageClient.h
+++ b/gfx/layers/client/ImageClient.h
@@ -54,17 +54,17 @@ public:
   virtual bool UpdateImage(ImageContainer* aContainer, uint32_t aContentFlags) = 0;
 
   /**
    * The picture rect is the area of the texture which makes up the image. That
    * is, the area that should be composited. In texture space.
    */
   virtual void UpdatePictureRect(nsIntRect aPictureRect);
 
-  virtual already_AddRefed<Image> CreateImage(const uint32_t *aFormats,
+  virtual already_AddRefed<Image> CreateImage(const ImageFormat *aFormats,
                                               uint32_t aNumFormats) = 0;
 
   /**
    * Synchronously remove all the textures used by the image client.
    */
   virtual void FlushAllImages(bool aExceptFront) {}
 
 protected:
@@ -92,17 +92,17 @@ public:
   virtual bool AddTextureClient(TextureClient* aTexture) MOZ_OVERRIDE;
 
   virtual TemporaryRef<BufferTextureClient>
   CreateBufferTextureClient(gfx::SurfaceFormat aFormat,
                             TextureFlags aFlags = TEXTURE_FLAGS_DEFAULT) MOZ_OVERRIDE;
 
   virtual TextureInfo GetTextureInfo() const MOZ_OVERRIDE;
 
-  virtual already_AddRefed<Image> CreateImage(const uint32_t *aFormats,
+  virtual already_AddRefed<Image> CreateImage(const ImageFormat *aFormats,
                                               uint32_t aNumFormats) MOZ_OVERRIDE;
 
   virtual void FlushAllImages(bool aExceptFront) MOZ_OVERRIDE;
 
 protected:
   RefPtr<TextureClient> mFrontBuffer;
   // Some layers may want to enforce some flags to all their textures
   // (like disallowing tiling)
@@ -161,17 +161,17 @@ public:
     mDeprecatedTextureClient->SetDescriptorFromReply(aDescriptor);
   }
 
   virtual TextureInfo GetTextureInfo() const MOZ_OVERRIDE
   {
     return mTextureInfo;
   }
 
-  virtual already_AddRefed<Image> CreateImage(const uint32_t *aFormats,
+  virtual already_AddRefed<Image> CreateImage(const ImageFormat *aFormats,
                                               uint32_t aNumFormats) MOZ_OVERRIDE;
 
 private:
   RefPtr<DeprecatedTextureClient> mDeprecatedTextureClient;
   TextureInfo mTextureInfo;
 };
 
 /**
@@ -198,17 +198,17 @@ public:
     return TextureInfo(mType);
   }
 
   virtual void SetIPDLActor(CompositableChild* aChild) MOZ_OVERRIDE
   {
     MOZ_ASSERT(!aChild, "ImageClientBridge should not have IPDL actor");
   }
 
-  virtual already_AddRefed<Image> CreateImage(const uint32_t *aFormats,
+  virtual already_AddRefed<Image> CreateImage(const ImageFormat *aFormats,
                                               uint32_t aNumFormats) MOZ_OVERRIDE
   {
     NS_WARNING("Should not create an image through an ImageClientBridge");
     return nullptr;
   }
 
 protected:
   uint64_t mAsyncContainerID;
--- a/gfx/layers/d3d10/ImageLayerD3D10.cpp
+++ b/gfx/layers/d3d10/ImageLayerD3D10.cpp
@@ -284,37 +284,37 @@ ImageLayerD3D10::RenderLayer()
     effect()->GetVariableByName("tCr")->AsShaderResource()->SetResource(data->mCrView);
 
     /*
      * Send 3d control data and metadata to NV3DVUtils
      */
     if (GetNv3DVUtils()) {
       Nv_Stereo_Mode mode;
       switch (yuvImage->GetData()->mStereoMode) {
-      case STEREO_MODE_LEFT_RIGHT:
+      case StereoMode::LEFT_RIGHT:
         mode = NV_STEREO_MODE_LEFT_RIGHT;
         break;
-      case STEREO_MODE_RIGHT_LEFT:
+      case StereoMode::RIGHT_LEFT:
         mode = NV_STEREO_MODE_RIGHT_LEFT;
         break;
-      case STEREO_MODE_BOTTOM_TOP:
+      case StereoMode::BOTTOM_TOP:
         mode = NV_STEREO_MODE_BOTTOM_TOP;
         break;
-      case STEREO_MODE_TOP_BOTTOM:
+      case StereoMode::TOP_BOTTOM:
         mode = NV_STEREO_MODE_TOP_BOTTOM;
         break;
-      case STEREO_MODE_MONO:
+      case StereoMode::MONO:
         mode = NV_STEREO_MODE_MONO;
         break;
       }
 
       // Send control data even in mono case so driver knows to leave stereo mode.
       GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);
 
-      if (yuvImage->GetData()->mStereoMode != STEREO_MODE_MONO) {
+      if (yuvImage->GetData()->mStereoMode != StereoMode::MONO) {
         // Dst resource is optional
         GetNv3DVUtils()->SendNv3DVMetaData((unsigned int)yuvImage->GetData()->mYSize.width,
                                            (unsigned int)yuvImage->GetData()->mYSize.height, (HANDLE)(data->mYTexture), (HANDLE)(nullptr));
       }
     }
 
     effect()->GetVariableByName("vLayerQuad")->AsVector()->SetFloatVector(
       ShaderConstantRectD3D10(
--- a/gfx/layers/d3d10/ImageLayerD3D10.h
+++ b/gfx/layers/d3d10/ImageLayerD3D10.h
@@ -53,17 +53,17 @@ struct PlanarYCbCrD3D10BackendData : pub
 struct TextureD3D10BackendData : public ImageBackendData
 {
   nsRefPtr<ID3D10Texture2D> mTexture;
   nsRefPtr<ID3D10ShaderResourceView> mSRView;
 };
 
 class RemoteDXGITextureImage : public Image {
 public:
-  RemoteDXGITextureImage() : Image(nullptr, REMOTE_IMAGE_DXGI_TEXTURE) {}
+  RemoteDXGITextureImage() : Image(nullptr, ImageFormat::REMOTE_IMAGE_DXGI_TEXTURE) {}
 
   already_AddRefed<gfxASurface> DeprecatedGetAsSurface();
   virtual TemporaryRef<gfx::SourceSurface> GetAsSourceSurface() MOZ_OVERRIDE;
 
   IntSize GetSize() { return mSize; }
 
   TextureD3D10BackendData *GetD3D10TextureBackendData(ID3D10Device *aDevice);
 
--- a/gfx/layers/d3d9/CompositorD3D9.cpp
+++ b/gfx/layers/d3d9/CompositorD3D9.cpp
@@ -355,37 +355,37 @@ CompositorD3D9::DrawQuad(const gfx::Rect
       MOZ_ASSERT(sourceCr->GetD3D9Texture());
 
       /*
        * Send 3d control data and metadata
        */
       if (mDeviceManager->GetNv3DVUtils()) {
         Nv_Stereo_Mode mode;
         switch (source->AsSourceD3D9()->GetStereoMode()) {
-        case STEREO_MODE_LEFT_RIGHT:
+        case StereoMode::LEFT_RIGHT:
           mode = NV_STEREO_MODE_LEFT_RIGHT;
           break;
-        case STEREO_MODE_RIGHT_LEFT:
+        case StereoMode::RIGHT_LEFT:
           mode = NV_STEREO_MODE_RIGHT_LEFT;
           break;
-        case STEREO_MODE_BOTTOM_TOP:
+        case StereoMode::BOTTOM_TOP:
           mode = NV_STEREO_MODE_BOTTOM_TOP;
           break;
-        case STEREO_MODE_TOP_BOTTOM:
+        case StereoMode::TOP_BOTTOM:
           mode = NV_STEREO_MODE_TOP_BOTTOM;
           break;
-        case STEREO_MODE_MONO:
+        case StereoMode::MONO:
           mode = NV_STEREO_MODE_MONO;
           break;
         }
 
         // Send control data even in mono case so driver knows to leave stereo mode.
         mDeviceManager->GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);
 
-        if (source->AsSourceD3D9()->GetStereoMode() != STEREO_MODE_MONO) {
+        if (source->AsSourceD3D9()->GetStereoMode() != StereoMode::MONO) {
           mDeviceManager->GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);
 
           nsRefPtr<IDirect3DSurface9> renderTarget;
           d3d9Device->GetRenderTarget(0, getter_AddRefs(renderTarget));
           mDeviceManager->GetNv3DVUtils()->SendNv3DVMetaData((unsigned int)aRect.width,
                                                              (unsigned int)aRect.height,
                                                              (HANDLE)(sourceY->GetD3D9Texture()),
                                                              (HANDLE)(renderTarget));
--- a/gfx/layers/d3d9/ImageLayerD3D9.cpp
+++ b/gfx/layers/d3d9/ImageLayerD3D9.cpp
@@ -322,30 +322,30 @@ ImageLayerD3D9::GetLayer()
   * If successful, aHasAlpha will be set to true if the texture has an
   * alpha component, false otherwise.
   */
 IDirect3DTexture9*
 ImageLayerD3D9::GetTexture(Image *aImage, bool& aHasAlpha)
 {
   NS_ASSERTION(aImage, "Null image.");
 
-  if (aImage->GetFormat() == REMOTE_IMAGE_BITMAP) {
+  if (aImage->GetFormat() == ImageFormat::REMOTE_IMAGE_BITMAP) {
     RemoteBitmapImage *remoteImage =
       static_cast<RemoteBitmapImage*>(aImage);
 
     if (!aImage->GetBackendData(mozilla::layers::LayersBackend::LAYERS_D3D9)) {
       nsAutoPtr<TextureD3D9BackendData> dat(new TextureD3D9BackendData());
       dat->mTexture = DataToTexture(device(), remoteImage->mData, remoteImage->mStride, remoteImage->mSize, D3DFMT_A8R8G8B8);
       if (dat->mTexture) {
         aImage->SetBackendData(mozilla::layers::LayersBackend::LAYERS_D3D9, dat.forget());
       }
     }
 
     aHasAlpha = remoteImage->mFormat == RemoteImageData::BGRA32;
-  } else if (aImage->GetFormat() == CAIRO_SURFACE) {
+  } else if (aImage->GetFormat() == ImageFormat::CAIRO_SURFACE) {
     CairoImage *cairoImage =
       static_cast<CairoImage*>(aImage);
 
     nsRefPtr<gfxASurface> surf = cairoImage->DeprecatedGetAsSurface();
     if (!surf) {
       return nullptr;
     }
 
@@ -353,17 +353,17 @@ ImageLayerD3D9::GetTexture(Image *aImage
       nsAutoPtr<TextureD3D9BackendData> dat(new TextureD3D9BackendData());
       dat->mTexture = SurfaceToTexture(device(), surf, cairoImage->GetSize());
       if (dat->mTexture) {
         aImage->SetBackendData(mozilla::layers::LayersBackend::LAYERS_D3D9, dat.forget());
       }
     }
 
     aHasAlpha = surf->GetContentType() == gfxContentType::COLOR_ALPHA;
-  } else if (aImage->GetFormat() == D3D9_RGB32_TEXTURE) {
+  } else if (aImage->GetFormat() == ImageFormat::D3D9_RGB32_TEXTURE) {
     if (!aImage->GetBackendData(mozilla::layers::LayersBackend::LAYERS_D3D9)) {
       // The texture in which the frame is stored belongs to DXVA's D3D9 device.
       // We need to open it on our device before we can use it.
       nsAutoPtr<TextureD3D9BackendData> backendData(new TextureD3D9BackendData());
       D3D9SurfaceImage* image = static_cast<D3D9SurfaceImage*>(aImage);
       backendData->mTexture = OpenSharedTexture(image->GetDesc(), image->GetShareHandle(), device());
       if (backendData->mTexture) {
         aImage->SetBackendData(mozilla::layers::LayersBackend::LAYERS_D3D9, backendData.forget());
@@ -405,21 +405,21 @@ ImageLayerD3D9::RenderLayer()
   if (!image) {
     return;
   }
 
   SetShaderTransformAndOpacity();
 
   gfx::IntSize size = image->GetSize();
 
-  if (image->GetFormat() == CAIRO_SURFACE ||
-      image->GetFormat() == REMOTE_IMAGE_BITMAP ||
-      image->GetFormat() == D3D9_RGB32_TEXTURE)
+  if (image->GetFormat() == ImageFormat::CAIRO_SURFACE ||
+      image->GetFormat() == ImageFormat::REMOTE_IMAGE_BITMAP ||
+      image->GetFormat() == ImageFormat::D3D9_RGB32_TEXTURE)
   {
-    NS_ASSERTION(image->GetFormat() != CAIRO_SURFACE ||
+    NS_ASSERTION(image->GetFormat() != ImageFormat::CAIRO_SURFACE ||
                  !static_cast<CairoImage*>(image)->mDeprecatedSurface ||
                  static_cast<CairoImage*>(image)->mDeprecatedSurface->GetContentType() != gfxContentType::ALPHA,
                  "Image layer has alpha image");
 
     bool hasAlpha = false;
     nsRefPtr<IDirect3DTexture9> texture = GetTexture(image, hasAlpha);
 
     device()->SetVertexShaderConstantF(CBvLayerQuad,
@@ -493,37 +493,37 @@ ImageLayerD3D9::RenderLayer()
     mD3DManager->SetShaderMode(DeviceManagerD3D9::YCBCRLAYER, GetMaskLayer());
 
     /*
      * Send 3d control data and metadata
      */
     if (mD3DManager->GetNv3DVUtils()) {
       Nv_Stereo_Mode mode;
       switch (yuvImage->GetData()->mStereoMode) {
-      case STEREO_MODE_LEFT_RIGHT:
+      case StereoMode::LEFT_RIGHT:
         mode = NV_STEREO_MODE_LEFT_RIGHT;
         break;
-      case STEREO_MODE_RIGHT_LEFT:
+      case StereoMode::RIGHT_LEFT:
         mode = NV_STEREO_MODE_RIGHT_LEFT;
         break;
-      case STEREO_MODE_BOTTOM_TOP:
+      case StereoMode::BOTTOM_TOP:
         mode = NV_STEREO_MODE_BOTTOM_TOP;
         break;
-      case STEREO_MODE_TOP_BOTTOM:
+      case StereoMode::TOP_BOTTOM:
         mode = NV_STEREO_MODE_TOP_BOTTOM;
         break;
-      case STEREO_MODE_MONO:
+      case StereoMode::MONO:
         mode = NV_STEREO_MODE_MONO;
         break;
       }
 
       // Send control data even in mono case so driver knows to leave stereo mode.
       mD3DManager->GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);
 
-      if (yuvImage->GetData()->mStereoMode != STEREO_MODE_MONO) {
+      if (yuvImage->GetData()->mStereoMode != StereoMode::MONO) {
         mD3DManager->GetNv3DVUtils()->SendNv3DVControl(mode, true, FIREFOX_3DV_APP_HANDLE);
 
         nsRefPtr<IDirect3DSurface9> renderTarget;
         device()->GetRenderTarget(0, getter_AddRefs(renderTarget));
         mD3DManager->GetNv3DVUtils()->SendNv3DVMetaData((unsigned int)yuvImage->GetSize().width,
                                                         (unsigned int)yuvImage->GetSize().height, (HANDLE)(data->mYTexture), (HANDLE)(renderTarget));
       }
     }
@@ -558,18 +558,18 @@ ImageLayerD3D9::GetAsTexture(gfx::IntSiz
   AutoLockImage autoLock(GetContainer());
 
   Image *image = autoLock.GetImage();
 
   if (!image) {
     return nullptr;
   }
 
-  if (image->GetFormat() != CAIRO_SURFACE &&
-      image->GetFormat() != REMOTE_IMAGE_BITMAP) {
+  if (image->GetFormat() != ImageFormat::CAIRO_SURFACE &&
+      image->GetFormat() != ImageFormat::REMOTE_IMAGE_BITMAP) {
     return nullptr;
   }
 
   bool dontCare;
   *aSize = image->GetSize();
   nsRefPtr<IDirect3DTexture9> result = GetTexture(image, dontCare);
   return result.forget();
 }
--- a/gfx/layers/d3d9/TextureD3D9.cpp
+++ b/gfx/layers/d3d9/TextureD3D9.cpp
@@ -1032,17 +1032,17 @@ DataTextureSourceD3D9::DataTextureSource
   , mCompositor(aCompositor)
   , mCurrentTile(0)
   , mFlags(aFlags)
   , mIsTiled(false)
   , mIterating(false)
 {
   mSize = aSize;
   mTexture = aTexture;
-  mStereoMode = STEREO_MODE_MONO;
+  mStereoMode = StereoMode::MONO;
   MOZ_COUNT_CTOR(DataTextureSourceD3D9);
 }
 
 DataTextureSourceD3D9::~DataTextureSourceD3D9()
 {
   MOZ_COUNT_DTOR(DataTextureSourceD3D9);
 }
 
--- a/gfx/layers/d3d9/TextureD3D9.h
+++ b/gfx/layers/d3d9/TextureD3D9.h
@@ -102,17 +102,17 @@ protected:
 class DataTextureSourceD3D9 : public DataTextureSource
                             , public TextureSourceD3D9
                             , public TileIterator
 {
 public:
   DataTextureSourceD3D9(gfx::SurfaceFormat aFormat,
                         CompositorD3D9* aCompositor,
                         TextureFlags aFlags = TEXTURE_FLAGS_DEFAULT,
-                        StereoMode aStereoMode = STEREO_MODE_MONO);
+                        StereoMode aStereoMode = StereoMode::MONO);
 
   DataTextureSourceD3D9(gfx::SurfaceFormat aFormat,
                         gfx::IntSize aSize,
                         CompositorD3D9* aCompositor,
                         IDirect3DTexture9* aTexture,
                         TextureFlags aFlags = TEXTURE_FLAGS_DEFAULT);
 
   virtual ~DataTextureSourceD3D9();
--- a/gfx/layers/ipc/SharedRGBImage.cpp
+++ b/gfx/layers/ipc/SharedRGBImage.cpp
@@ -23,17 +23,17 @@
 
 // Just big enough for a 1080p RGBA32 frame
 #define MAX_FRAME_SIZE (16 * 1024 * 1024)
 
 namespace mozilla {
 namespace layers {
 
 DeprecatedSharedRGBImage::DeprecatedSharedRGBImage(ISurfaceAllocator *aAllocator) :
-  Image(nullptr, SHARED_RGB),
+  Image(nullptr, ImageFormat::SHARED_RGB),
   mSize(0, 0),
   mSurfaceAllocator(aAllocator),
   mAllocated(false),
   mShmem(new ipc::Shmem())
 {
   MOZ_COUNT_CTOR(DeprecatedSharedRGBImage);
 }
 
@@ -59,17 +59,17 @@ CreateSharedRGBImage(ImageContainer *aIm
                aImageFormat == gfxImageFormat::RGB16_565,
                "RGB formats supported only");
 
   if (!aImageContainer) {
     NS_WARNING("No ImageContainer to allocate DeprecatedSharedRGBImage");
     return nullptr;
   }
 
-  ImageFormat format = SHARED_RGB;
+  ImageFormat format = ImageFormat::SHARED_RGB;
   nsRefPtr<Image> image = aImageContainer->CreateImage(&format, 1);
 
   if (!image) {
     NS_WARNING("Failed to create DeprecatedSharedRGBImage");
     return nullptr;
   }
 
   if (gfxPlatform::GetPlatform()->UseDeprecatedTextures()) {
@@ -180,17 +180,17 @@ DeprecatedSharedRGBImage::FromSurfaceDes
   const RGBImage& rgb = aDescriptor.get_RGBImage();
   if (rgb.owner() == 0) {
     return nullptr;
   }
   return reinterpret_cast<DeprecatedSharedRGBImage*>(rgb.owner());
 }
 
 SharedRGBImage::SharedRGBImage(ImageClient* aCompositable)
-: Image(nullptr, SHARED_RGB)
+: Image(nullptr, ImageFormat::SHARED_RGB)
 , mCompositable(aCompositable)
 {
   MOZ_COUNT_CTOR(SharedRGBImage);
 }
 
 SharedRGBImage::~SharedRGBImage()
 {
   MOZ_COUNT_DTOR(SharedRGBImage);
--- a/gfx/tests/gtest/TestTextures.cpp
+++ b/gfx/tests/gtest/TestTextures.cpp
@@ -242,17 +242,17 @@ TEST(Layers, TextureYCbCrSerialization) 
   clientData.mYChannel = ySurface->Data();
   clientData.mCbChannel = cbSurface->Data();
   clientData.mCrChannel = crSurface->Data();
   clientData.mYSize = ySurface->GetSize().ToIntSize();
   clientData.mPicSize = ySurface->GetSize().ToIntSize();
   clientData.mCbCrSize = cbSurface->GetSize().ToIntSize();
   clientData.mYStride = ySurface->Stride();
   clientData.mCbCrStride = cbSurface->Stride();
-  clientData.mStereoMode = STEREO_MODE_MONO;
+  clientData.mStereoMode = StereoMode::MONO;
   clientData.mYSkip = 0;
   clientData.mCbSkip = 0;
   clientData.mCrSkip = 0;
   clientData.mCrSkip = 0;
   clientData.mPicX = 0;
   clientData.mPicX = 0;
 
   RefPtr<TextureClient> client
--- a/image/src/RasterImage.cpp
+++ b/image/src/RasterImage.cpp
@@ -953,17 +953,17 @@ RasterImage::GetCurrentImage()
   }
 
   CairoImage::Data cairoData;
   cairoData.mDeprecatedSurface = imageSurface;
   GetWidth(&cairoData.mSize.width);
   GetHeight(&cairoData.mSize.height);
   cairoData.mSourceSurface = gfxPlatform::GetPlatform()->GetSourceSurfaceForSurface(nullptr, imageSurface);
 
-  ImageFormat cairoFormat = CAIRO_SURFACE;
+  ImageFormat cairoFormat = ImageFormat::CAIRO_SURFACE;
   nsRefPtr<layers::Image> image = mImageContainer->CreateImage(&cairoFormat, 1);
   NS_ASSERTION(image, "Failed to create Image");
 
   NS_ASSERTION(image->GetFormat() == cairoFormat, "Wrong format");
   static_cast<CairoImage*>(image.get())->SetData(cairoData);
 
   return image.forget();
 }
--- a/layout/base/FrameLayerBuilder.cpp
+++ b/layout/base/FrameLayerBuilder.cpp
@@ -3863,17 +3863,17 @@ ContainerState::SetupMaskLayer(Layer *aL
     aClip.DrawRoundedRectsTo(context,
                              newData.mAppUnitsPerDevPixel,
                              0,
                              aRoundedRectClipCount);
 
     // build the image and container
     container = aLayer->Manager()->CreateImageContainer();
     NS_ASSERTION(container, "Could not create image container for mask layer.");
-    static const ImageFormat format = CAIRO_SURFACE;
+    static const ImageFormat format = ImageFormat::CAIRO_SURFACE;
     nsRefPtr<Image> image = container->CreateImage(&format, 1);
     NS_ASSERTION(image, "Could not create image container for mask layer.");
     CairoImage::Data data;
     data.mDeprecatedSurface = surface;
     data.mSize = surfaceSizeInt;
     data.mSourceSurface = gfxPlatform::GetPlatform()->GetSourceSurfaceForSurface(nullptr, surface);
     static_cast<CairoImage*>(image.get())->SetData(data);
     container->SetCurrentImageInTransaction(image);
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -892,33 +892,33 @@ void MediaPipelineTransmit::PipelineList
   int32_t serial = img->GetSerial();
   if (serial == last_img_) {
     return;
   }
   last_img_ = serial;
 
   ImageFormat format = img->GetFormat();
 #ifdef MOZ_WIDGET_GONK
-  if (format == GRALLOC_PLANAR_YCBCR) {
+  if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
     layers::GrallocImage *nativeImage = static_cast<layers::GrallocImage*>(img);
     layers::SurfaceDescriptor handle = nativeImage->GetSurfaceDescriptor();
     layers::SurfaceDescriptorGralloc grallocHandle = handle.get_SurfaceDescriptorGralloc();
 
     android::sp<android::GraphicBuffer> graphicBuffer = layers::GrallocBufferActor::GetFrom(grallocHandle);
     void *basePtr;
     graphicBuffer->lock(android::GraphicBuffer::USAGE_SW_READ_MASK, &basePtr);
     conduit->SendVideoFrame(static_cast<unsigned char*>(basePtr),
                             (graphicBuffer->getWidth() * graphicBuffer->getHeight() * 3) / 2,
                             graphicBuffer->getWidth(),
                             graphicBuffer->getHeight(),
                             mozilla::kVideoNV21, 0);
     graphicBuffer->unlock();
   } else
 #endif
-  if (format == PLANAR_YCBCR) {
+  if (format == ImageFormat::PLANAR_YCBCR) {
     // Cast away constness b/c some of the accessors are non-const
     layers::PlanarYCbCrImage* yuv =
     const_cast<layers::PlanarYCbCrImage *>(
           static_cast<const layers::PlanarYCbCrImage *>(img));
     // Big-time assumption here that this is all contiguous data coming
     // from getUserMedia or other sources.
     const layers::PlanarYCbCrData *data = yuv->GetData();
 
@@ -941,17 +941,17 @@ void MediaPipelineTransmit::PipelineList
     // GrallocImage can have wider strides, and so in some cases
     // would encode as garbage.  If we need to encode it we'll either want to
     // modify SendVideoFrame or copy/move the data in the buffer.
 
     // OK, pass it on to the conduit
     MOZ_MTLOG(ML_DEBUG, "Sending a video frame");
     // Not much for us to do with an error
     conduit->SendVideoFrame(y, length, width, height, mozilla::kVideoI420, 0);
-  } else if(format == CAIRO_SURFACE) {
+  } else if(format == ImageFormat::CAIRO_SURFACE) {
     layers::CairoImage* rgb =
     const_cast<layers::CairoImage *>(
           static_cast<const layers::CairoImage *>(img));
 
     gfx::IntSize size = rgb->GetSize();
     int half_width = (size.width + 1) >> 1;
     int half_height = (size.height + 1) >> 1;
     int c_size = half_width * half_height;
@@ -1194,19 +1194,19 @@ void MediaPipelineReceiveVideo::Pipeline
     unsigned int buffer_size,
     uint32_t time_stamp,
     int64_t render_time) {
 #ifdef MOZILLA_INTERNAL_API
   ReentrantMonitorAutoEnter enter(monitor_);
 
   // Create a video frame and append it to the track.
 #ifdef MOZ_WIDGET_GONK
-  ImageFormat format = GRALLOC_PLANAR_YCBCR;
+  ImageFormat format = ImageFormat::GRALLOC_PLANAR_YCBCR;
 #else
-  ImageFormat format = PLANAR_YCBCR;
+  ImageFormat format = ImageFormat::PLANAR_YCBCR;
 #endif
   nsRefPtr<layers::Image> image = image_container_->CreateImage(&format, 1);
 
   layers::PlanarYCbCrImage* videoImage = static_cast<layers::PlanarYCbCrImage*>(image.get());
   uint8_t* frame = const_cast<uint8_t*>(static_cast<const uint8_t*> (buffer));
   const uint8_t lumaBpp = 8;
   const uint8_t chromaBpp = 4;
 
@@ -1216,17 +1216,17 @@ void MediaPipelineReceiveVideo::Pipeline
   data.mYStride = width_ * lumaBpp/ 8;
   data.mCbCrStride = width_ * chromaBpp / 8;
   data.mCbChannel = frame + height_ * data.mYStride;
   data.mCrChannel = data.mCbChannel + height_ * data.mCbCrStride / 2;
   data.mCbCrSize = IntSize(width_/ 2, height_/ 2);
   data.mPicX = 0;
   data.mPicY = 0;
   data.mPicSize = IntSize(width_, height_);
-  data.mStereoMode = STEREO_MODE_MONO;
+  data.mStereoMode = StereoMode::MONO;
 
   videoImage->SetData(data);
 
   image_ = image.forget();
 #endif
 }
 
 void MediaPipelineReceiveVideo::PipelineListener::
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
@@ -113,17 +113,17 @@ class Fake_VideoGenerator {
 
   static void Callback(nsITimer* timer, void *arg) {
     Fake_VideoGenerator* gen = static_cast<Fake_VideoGenerator*>(arg);
 
     const uint32_t WIDTH = 640;
     const uint32_t HEIGHT = 480;
 
     // Allocate a single blank Image
-    mozilla::ImageFormat format = mozilla::PLANAR_YCBCR;
+    mozilla::ImageFormat format = mozilla::ImageFormat::PLANAR_YCBCR;
     nsRefPtr<mozilla::layers::ImageContainer> container =
       mozilla::layers::LayerManager::CreateImageContainer();
 
     nsRefPtr<mozilla::layers::Image> image = container->CreateImage(&format, 1);
 
     int len = ((WIDTH * HEIGHT) * 3 / 2);
     mozilla::layers::PlanarYCbCrImage* planar =
       static_cast<mozilla::layers::PlanarYCbCrImage*>(image.get());
@@ -140,17 +140,17 @@ class Fake_VideoGenerator {
     data.mYStride = (int32_t) (WIDTH * lumaBpp / 8.0);
     data.mCbCrStride = (int32_t) (WIDTH * chromaBpp / 8.0);
     data.mCbChannel = frame + HEIGHT * data.mYStride;
     data.mCrChannel = data.mCbChannel + HEIGHT * data.mCbCrStride / 2;
     data.mCbCrSize = IntSize(WIDTH / 2, HEIGHT / 2);
     data.mPicX = 0;
     data.mPicY = 0;
     data.mPicSize = IntSize(WIDTH, HEIGHT);
-    data.mStereoMode = mozilla::STEREO_MODE_MONO;
+    data.mStereoMode = mozilla::StereoMode::MONO;
 
     // SetData copies data, so we can free the frame
     planar->SetData(data);
     PR_Free(frame);
 
     // AddTrack takes ownership of segment
     mozilla::VideoSegment *segment = new mozilla::VideoSegment();
     // 10 fps.
--- a/media/webrtc/signaling/test/FakeMediaStreamsImpl.h
+++ b/media/webrtc/signaling/test/FakeMediaStreamsImpl.h
@@ -149,17 +149,17 @@ Fake_VideoStreamSource::Notify(nsITimer*
   data.mYStride = WIDTH * lumaBpp / 8.0;
   data.mCbCrStride = WIDTH * chromaBpp / 8.0;
   data.mCbChannel = frame + HEIGHT * data.mYStride;
   data.mCrChannel = data.mCbChannel + HEIGHT * data.mCbCrStride / 2;
   data.mCbCrSize = gfxIntSize(WIDTH / 2, HEIGHT / 2);
   data.mPicX = 0;
   data.mPicY = 0;
   data.mPicSize = gfxIntSize(WIDTH, HEIGHT);
-  data.mStereoMode = mozilla::layers::STEREO_MODE_MONO;
+  data.mStereoMode = mozilla::layers::StereoMode::MONO;
 
   mozilla::VideoSegment segment;
   segment.AppendFrame(image.forget(), USECS_PER_S / FPS, gfxIntSize(WIDTH, HEIGHT));
 
   // TODO(ekr@rtfm.com): are we leaking?
 #endif
 
   return NS_OK;
@@ -177,17 +177,17 @@ void mozilla::layers::BufferRecycleBin::
 }
 
 uint8_t *mozilla::layers::BufferRecycleBin::GetBuffer(uint32_t size) {
   return (uint8_t *)PR_MALLOC(size);
 }
 
 // YCbCrImage constructor (from ImageLayers.cpp)
 mozilla::layers::PlanarYCbCrImage::PlanarYCbCrImage(BufferRecycleBin *aRecycleBin)
-  : Image(nsnull, PLANAR_YCBCR)
+  : Image(nsnull, ImageFormat::PLANAR_YCBCR)
   , mBufferSize(0)
   , mRecycleBin(aRecycleBin)
 {
 }
 
 
 #endif
 #endif