Bug 759506 - Add support for zero-copy OMX hardware decoding to B2G r=doublec
author Edwin Flores <eflores@mozilla.com>
Thu, 27 Sep 2012 16:33:43 +1200
changeset 108255 69d3a830885120643b0d9f77170fda41c73bd906
parent 108254 3876b8007889385c0a334138e4fadec93d22c1ae
child 108256 18becb73b9064402281e49313dd3bf9abefdd1a3
push id 23552
push user ryanvm@gmail.com
push date Fri, 28 Sep 2012 03:05:08 +0000
treeherder mozilla-central@2d96ee8d9dd4
reviewers doublec
bugs 759506
milestone 18.0a1
Bug 759506 - Add support for zero-copy OMX hardware decoding to B2G r=doublec
content/html/content/public/nsHTMLMediaElement.h
content/html/content/src/nsHTMLMediaElement.cpp
content/media/Makefile.in
content/media/VideoFrameContainer.cpp
content/media/nsBuiltinDecoderReader.cpp
content/media/nsBuiltinDecoderReader.h
content/media/nsBuiltinDecoderStateMachine.cpp
content/media/omx/MPAPI.h
content/media/omx/Makefile.in
content/media/omx/OmxDecoder.cpp
content/media/omx/OmxDecoder.h
content/media/omx/nsMediaOmxDecoder.cpp
content/media/omx/nsMediaOmxDecoder.h
content/media/omx/nsMediaOmxReader.cpp
content/media/omx/nsMediaOmxReader.h
--- a/content/html/content/public/nsHTMLMediaElement.h
+++ b/content/html/content/public/nsHTMLMediaElement.h
@@ -302,16 +302,23 @@ public:
 
 #ifdef MOZ_GSTREAMER
   static bool IsH264Enabled();
   static bool IsH264Type(const nsACString& aType);
   static const char gH264Types[3][16];
   static char const *const gH264Codecs[7];
 #endif
 
+#ifdef MOZ_WIDGET_GONK
+  static bool IsOmxEnabled();
+  static bool IsH264Type(const nsACString& aType);
+  static const char gH264Types[3][16];
+  static char const *const gH264Codecs[7];
+#endif
+
 #ifdef MOZ_MEDIA_PLUGINS
   static bool IsMediaPluginsEnabled();
   static bool IsMediaPluginsType(const nsACString& aType);
 #endif
 
   /**
    * Get the mime type for this element.
    */
--- a/content/html/content/src/nsHTMLMediaElement.cpp
+++ b/content/html/content/src/nsHTMLMediaElement.cpp
@@ -1,16 +1,17 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/Util.h"
 
+#include "base/basictypes.h"
 #include "nsIDOMHTMLMediaElement.h"
 #include "nsIDOMHTMLSourceElement.h"
 #include "nsHTMLMediaElement.h"
 #include "nsTimeRanges.h"
 #include "nsGenericHTMLElement.h"
 #include "nsPresContext.h"
 #include "nsIPresShell.h"
 #include "nsGkAtoms.h"
@@ -82,16 +83,19 @@
 #endif
 #ifdef MOZ_GSTREAMER
 #include "nsGStreamerDecoder.h"
 #endif
 #ifdef MOZ_MEDIA_PLUGINS
 #include "nsMediaPluginHost.h"
 #include "nsMediaPluginDecoder.h"
 #endif
+#ifdef MOZ_WIDGET_GONK
+#include "nsMediaOmxDecoder.h"
+#endif
 
 #ifdef PR_LOGGING
 static PRLogModuleInfo* gMediaElementLog;
 static PRLogModuleInfo* gMediaElementEventsLog;
 #define LOG(type, msg) PR_LOG(gMediaElementLog, type, msg)
 #define LOG_EVENT(type, msg) PR_LOG(gMediaElementEventsLog, type, msg)
 #else
 #define LOG(type, msg)
@@ -2120,33 +2124,35 @@ nsHTMLMediaElement::IsWebMType(const nsA
       return true;
     }
   }
 
   return false;
 }
 #endif
 
-#ifdef MOZ_GSTREAMER
+#if defined(MOZ_GSTREAMER) || defined(MOZ_WIDGET_GONK)
 const char nsHTMLMediaElement::gH264Types[3][16] = {
   "video/mp4",
   "video/3gpp",
   "video/quicktime",
 };
 
 char const *const nsHTMLMediaElement::gH264Codecs[7] = {
   "avc1.42E01E",
   "avc1.42001E",
   "avc1.58A01E",
   "avc1.4D401E",
   "avc1.64001E",
   "mp4a.40.2",
   nullptr
 };
-
+#endif
+
+#ifdef MOZ_GSTREAMER
 bool
 nsHTMLMediaElement::IsH264Enabled()
 {
   return Preferences::GetBool("media.h264.enabled");
 }
 
 bool
 nsHTMLMediaElement::IsH264Type(const nsACString& aType)
@@ -2160,16 +2166,40 @@ nsHTMLMediaElement::IsH264Type(const nsA
       return true;
     }
   }
 
   return false;
 }
 #endif
 
+#ifdef MOZ_WIDGET_GONK
+bool
+nsHTMLMediaElement::IsOmxEnabled()
+{
+  return Preferences::GetBool("media.omx.enabled", false);
+}
+
+bool
+nsHTMLMediaElement::IsH264Type(const nsACString& aType)
+{
+  if (!IsOmxEnabled()) {
+    return false;
+  }
+
+  for (uint32_t i = 0; i < ArrayLength(gH264Types); ++i) {
+    if (aType.EqualsASCII(gH264Types[i])) {
+      return true;
+    }
+  }
+
+  return false;
+}
+#endif
+
 #ifdef MOZ_MEDIA_PLUGINS
 bool
 nsHTMLMediaElement::IsMediaPluginsEnabled()
 {
   return Preferences::GetBool("media.plugins.enabled");
 }
 
 bool
@@ -2222,16 +2252,22 @@ nsHTMLMediaElement::CanHandleMediaType(c
 #endif
 
 #ifdef MOZ_GSTREAMER
   if (IsH264Type(nsDependentCString(aMIMEType))) {
     *aCodecList = gH264Codecs;
     return CANPLAY_MAYBE;
   }
 #endif
+#ifdef MOZ_WIDGET_GONK
+  if (IsH264Type(nsDependentCString(aMIMEType))) {
+    *aCodecList = gH264Codecs;
+    return CANPLAY_MAYBE;
+  }
+#endif
 #ifdef MOZ_MEDIA_PLUGINS
   if (IsMediaPluginsEnabled() && GetMediaPluginHost()->FindDecoder(nsDependentCString(aMIMEType), aCodecList))
     return CANPLAY_MAYBE;
 #endif
   return CANPLAY_NO;
 }
 
 /* static */
@@ -2248,16 +2284,21 @@ bool nsHTMLMediaElement::ShouldHandleMed
 #ifdef MOZ_WEBM
   if (IsWebMType(nsDependentCString(aMIMEType)))
     return true;
 #endif
 #ifdef MOZ_GSTREAMER
   if (IsH264Type(nsDependentCString(aMIMEType)))
     return true;
 #endif
+#ifdef MOZ_WIDGET_GONK
+  if (IsH264Type(nsDependentCString(aMIMEType))) {
+    return true;
+  }
+#endif
 #ifdef MOZ_MEDIA_PLUGINS
   if (IsMediaPluginsEnabled() && GetMediaPluginHost()->FindDecoder(nsDependentCString(aMIMEType), NULL))
     return true;
 #endif
   // We should not return true for Wave types, since there are some
   // Wave codecs actually in use in the wild that we don't support, and
   // we should allow those to be handled by plugins or helper apps.
   // Furthermore people can play Wave files on most platforms by other
@@ -2364,16 +2405,24 @@ nsHTMLMediaElement::CreateDecoder(const 
 #ifdef MOZ_WAVE
   if (IsWaveType(aType)) {
     nsRefPtr<nsWaveDecoder> decoder = new nsWaveDecoder();
     if (decoder->Init(this)) {
       return decoder.forget();
     }
   }
 #endif
+#ifdef MOZ_WIDGET_GONK
+  if (IsH264Type(aType)) {
+    nsRefPtr<nsMediaOmxDecoder> decoder = new nsMediaOmxDecoder();
+    if (decoder->Init(this)) {
+      return decoder.forget();
+    }
+  }
+#endif
 #ifdef MOZ_MEDIA_PLUGINS
   if (IsMediaPluginsEnabled() && GetMediaPluginHost()->FindDecoder(aType, NULL)) {
     nsRefPtr<nsMediaPluginDecoder> decoder = new nsMediaPluginDecoder(aType);
     if (decoder->Init(this)) {
       return decoder.forget();
     }
   }
 #endif
--- a/content/media/Makefile.in
+++ b/content/media/Makefile.in
@@ -85,16 +85,20 @@ PARALLEL_DIRS += gstreamer
 endif
 
 ifdef MOZ_MEDIA_PLUGINS
 PARALLEL_DIRS += plugins
 endif
 
 PARALLEL_DIRS += webrtc
 
+ifeq (gonk,$(MOZ_WIDGET_TOOLKIT))
+PARALLEL_DIRS += omx
+endif
+
 TEST_DIRS += test
 
 FORCE_STATIC_LIB = 1
 
 include $(topsrcdir)/config/config.mk
 include $(topsrcdir)/ipc/chromium/chromium-config.mk
 include $(topsrcdir)/config/rules.mk
 
--- a/content/media/VideoFrameContainer.cpp
+++ b/content/media/VideoFrameContainer.cpp
@@ -41,16 +41,27 @@ void VideoFrameContainer::SetCurrentFram
     mIntrinsicSizeChanged = true;
   }
 
   gfxIntSize oldFrameSize = mImageContainer->GetCurrentSize();
   TimeStamp lastPaintTime = mImageContainer->GetPaintTime();
   if (!lastPaintTime.IsNull() && !mPaintTarget.IsNull()) {
     mPaintDelay = lastPaintTime - mPaintTarget;
   }
+
+  // When using the OMX decoder, destruction of the current image can
+  //  indirectly block on main thread I/O. If we let that happen while holding
+  //  onto |mImageContainer|'s lock, the main thread can in turn block on that
+  //  same lock when it tries to composite, causing a deadlock. We use this
+  //  hack to defer the destruction of the current image until it is safe to
+  //  do so.
+  nsRefPtr<Image> kungFuDeathGrip;
+  kungFuDeathGrip = mImageContainer->LockCurrentImage();
+  mImageContainer->UnlockCurrentImage();
+
   mImageContainer->SetCurrentImage(aImage);
   gfxIntSize newFrameSize = mImageContainer->GetCurrentSize();
   if (oldFrameSize != newFrameSize) {
     mImageSizeChanged = true;
   }
 
   mPaintTarget = aTargetTime;
 }
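
The deadlock-avoidance hack above boils down to ensuring that the old image's destructor never runs while |mImageContainer|'s lock is held. A minimal, generic C++ sketch of that pattern, using standard-library types and hypothetical names rather than Gecko's, for illustration only:

#include <memory>
#include <mutex>
#include <utility>

struct Image {
  ~Image() { /* may block on main-thread I/O in the OMX case */ }
};

struct Container {
  std::mutex mLock;
  std::shared_ptr<Image> mCurrent;

  void SetCurrent(std::shared_ptr<Image> aNew) {
    std::shared_ptr<Image> doomed;             // outlives the locked section
    {
      std::lock_guard<std::mutex> guard(mLock);
      doomed = std::move(mCurrent);            // defer destruction of the old image
      mCurrent = std::move(aNew);
    }
    // The old image is destroyed here, after mLock has been released, so a
    // blocking destructor can no longer deadlock against the compositor.
  }
};
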
--- a/content/media/nsBuiltinDecoderReader.cpp
+++ b/content/media/nsBuiltinDecoderReader.cpp
@@ -1,14 +1,15 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
+#include "GonkIOSurfaceImage.h"
 #include "nsBuiltinDecoder.h"
 #include "nsBuiltinDecoderReader.h"
 #include "nsBuiltinDecoderStateMachine.h"
 #include "VideoUtils.h"
 #include "ImageContainer.h"
 
 #include "mozilla/mozalloc.h"
 #include "mozilla/StandardInteger.h"
@@ -233,16 +234,84 @@ VideoData* VideoData::Create(nsVideoInfo
   data.mPicSize = gfxIntSize(aPicture.width, aPicture.height);
   data.mStereoMode = aInfo.mStereoMode;
 
   videoImage->SetDelayedConversion(true);
   videoImage->SetData(data);
   return v.forget();
 }
 
+#ifdef MOZ_WIDGET_GONK
+VideoData* VideoData::Create(nsVideoInfo& aInfo,
+                             ImageContainer* aContainer,
+                             int64_t aOffset,
+                             int64_t aTime,
+                             int64_t aEndTime,
+                             mozilla::layers::GraphicBufferLocked *aBuffer,
+                             bool aKeyframe,
+                             int64_t aTimecode,
+                             nsIntRect aPicture)
+{
+  if (!aContainer) {
+    // Create a dummy VideoData with no image. This gives us something to
+    // send to media streams if necessary.
+    nsAutoPtr<VideoData> v(new VideoData(aOffset,
+                                         aTime,
+                                         aEndTime,
+                                         aKeyframe,
+                                         aTimecode,
+                                         aInfo.mDisplay));
+    return v.forget();
+  }
+
+  // The following situations could be triggered by invalid input
+  if (aPicture.width <= 0 || aPicture.height <= 0) {
+    NS_WARNING("Empty picture rect");
+    return nullptr;
+  }
+
+  // Ensure the picture size specified in the headers can be extracted out of
+  // the frame we've been supplied without indexing out of bounds.
+  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
+  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
+  if (!xLimit.isValid() || !yLimit.isValid())
+  {
+    // The specified picture dimensions can't be contained inside the video
+    // frame, we'll stomp memory if we try to copy it. Fail.
+    NS_WARNING("Overflowing picture rect");
+    return nullptr;
+  }
+
+  nsAutoPtr<VideoData> v(new VideoData(aOffset,
+                                       aTime,
+                                       aEndTime,
+                                       aKeyframe,
+                                       aTimecode,
+                                       aInfo.mDisplay));
+
+  ImageFormat format = GONK_IO_SURFACE;
+  v->mImage = aContainer->CreateImage(&format, 1);
+  if (!v->mImage) {
+    return nullptr;
+  }
+  NS_ASSERTION(v->mImage->GetFormat() == GONK_IO_SURFACE,
+               "Wrong format?");
+  typedef mozilla::layers::GonkIOSurfaceImage GonkIOSurfaceImage;
+  GonkIOSurfaceImage* videoImage = static_cast<GonkIOSurfaceImage*>(v->mImage.get());
+  GonkIOSurfaceImage::Data data;
+
+  data.mPicSize = gfxIntSize(aPicture.width, aPicture.height);
+  data.mGraphicBuffer = aBuffer;
+
+  videoImage->SetData(data);
+
+  return v.forget();
+}
+#endif  // MOZ_WIDGET_GONK
+
 void* nsBuiltinDecoderReader::VideoQueueMemoryFunctor::operator()(void* anObject) {
   const VideoData* v = static_cast<const VideoData*>(anObject);
   if (!v->mImage) {
     return nullptr;
   }
   NS_ASSERTION(v->mImage->GetFormat() == PLANAR_YCBCR,
                "Wrong format?");
   mozilla::layers::PlanarYCbCrImage* vi = static_cast<mozilla::layers::PlanarYCbCrImage*>(v->mImage.get());
--- a/content/media/nsBuiltinDecoderReader.h
+++ b/content/media/nsBuiltinDecoderReader.h
@@ -116,16 +116,22 @@ public:
   const uint32_t mChannels;
   // At least one of mAudioBuffer/mAudioData must be non-null.
   // mChannels channels, each with mFrames frames
   nsRefPtr<SharedBuffer> mAudioBuffer;
   // mFrames frames, each with mChannels values
   nsAutoArrayPtr<AudioDataValue> mAudioData;
 };
 
+namespace mozilla {
+namespace layers {
+class GraphicBufferLocked;
+}
+}
+
 // Holds a decoded video frame, in YCbCr format. These are queued in the reader.
 class VideoData {
 public:
   typedef mozilla::layers::ImageContainer ImageContainer;
   typedef mozilla::layers::Image Image;
 
   // YCbCr data obtained from decoding the video. The index's are:
   //   0 = Y
@@ -155,16 +161,26 @@ public:
                            int64_t aOffset,
                            int64_t aTime,
                            int64_t aEndTime,
                            const YCbCrBuffer &aBuffer,
                            bool aKeyframe,
                            int64_t aTimecode,
                            nsIntRect aPicture);
 
+  static VideoData* Create(nsVideoInfo& aInfo,
+                           ImageContainer* aContainer,
+                           int64_t aOffset,
+                           int64_t aTime,
+                           int64_t aEndTime,
+                           mozilla::layers::GraphicBufferLocked *aBuffer,
+                           bool aKeyframe,
+                           int64_t aTimecode,
+                           nsIntRect aPicture);
+
   // Constructs a duplicate VideoData object. This intrinsically tells the
   // player that it does not need to update the displayed frame when this
   // frame is played; this frame is identical to the previous.
   static VideoData* CreateDuplicate(int64_t aOffset,
                                     int64_t aTime,
                                     int64_t aEndTime,
                                     int64_t aTimecode)
   {
--- a/content/media/nsBuiltinDecoderStateMachine.cpp
+++ b/content/media/nsBuiltinDecoderStateMachine.cpp
@@ -58,17 +58,25 @@ const uint32_t SILENCE_BYTES_CHUNK = 32 
 // If we have fewer than LOW_VIDEO_FRAMES decoded frames, and
 // we're not "pumping video", we'll skip the video up to the next keyframe
 // which is at or after the current playback position.
 static const uint32_t LOW_VIDEO_FRAMES = 1;
 
 // If we've got more than AMPLE_VIDEO_FRAMES decoded video frames waiting in
 // the video queue, we will not decode any more video frames until some have
 // been consumed by the play state machine thread.
+#ifdef MOZ_WIDGET_GONK
+// On B2G the equivalent limit is decided per OMX decoder by
+// |OMX_PARAM_PORTDEFINITIONTYPE::nBufferCountMin|. This number must be less
+// than that OMX value, or Gecko will think it is chronically starved of
+// video frames. All decoders seen so far have a value of at least 4.
+static const uint32_t AMPLE_VIDEO_FRAMES = 3;
+#else
 static const uint32_t AMPLE_VIDEO_FRAMES = 10;
+#endif
 
 // Arbitrary "frame duration" when playing only audio.
 static const int AUDIO_DURATION_USECS = 40000;
 
 // If we increase our "low audio threshold" (see LOW_AUDIO_USECS above), we
 // use this as a factor in all our calculations. Increasing this will cause
 // us to be more likely to increase our low audio threshold, and to
 // increase it by more.
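
For reference, the per-codec minimum that the AMPLE_VIDEO_FRAMES comment above refers to comes from the component's OpenMAX IL port definition. A hedged sketch, not part of this patch (the component handle and output port index are assumptions), of how nBufferCountMin could be queried through the raw IL API:

#include <string.h>
#include <OMX_Core.h>
#include <OMX_Component.h>

// hComponent: an already-allocated OMX component handle (assumption);
// aPortIndex: the decoder's output port index (assumption).
static OMX_U32 GetMinOutputBuffers(OMX_HANDLETYPE hComponent, OMX_U32 aPortIndex)
{
  OMX_PARAM_PORTDEFINITIONTYPE def;
  memset(&def, 0, sizeof(def));
  def.nSize = sizeof(def);
  def.nVersion.s.nVersionMajor = 1;  // OpenMAX IL 1.x
  def.nVersion.s.nVersionMinor = 1;
  def.nPortIndex = aPortIndex;

  if (OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &def) != OMX_ErrorNone) {
    return 0;
  }
  // AMPLE_VIDEO_FRAMES must stay below this value, per the comment above.
  return def.nBufferCountMin;
}
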
new file mode 100644
--- /dev/null
+++ b/content/media/omx/MPAPI.h
@@ -0,0 +1,133 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(MPAPI_h_)
+#define MPAPI_h_
+
+#include <stdint.h>
+#include "GonkIOSurfaceImage.h"
+
+namespace MPAPI {
+
+struct VideoPlane {
+  void *mData;
+  int32_t mStride;
+  int32_t mWidth;
+  int32_t mHeight;
+  int32_t mOffset;
+  int32_t mSkip;
+};
+
+struct VideoFrame {
+  int64_t mTimeUs;
+  int64_t mEndTimeUs;
+  bool mKeyFrame;
+  void *mData;
+  size_t mSize;
+  int32_t mStride;
+  int32_t mSliceHeight;
+  int32_t mRotation;
+  VideoPlane Y;
+  VideoPlane Cb;
+  VideoPlane Cr;
+  nsRefPtr<mozilla::layers::GraphicBufferLocked> mGraphicBuffer;
+
+  void Set(int64_t aTimeUs, bool aKeyFrame,
+           void *aData, size_t aSize, int32_t aStride, int32_t aSliceHeight, int32_t aRotation,
+           void *aYData, int32_t aYStride, int32_t aYWidth, int32_t aYHeight, int32_t aYOffset, int32_t aYSkip,
+           void *aCbData, int32_t aCbStride, int32_t aCbWidth, int32_t aCbHeight, int32_t aCbOffset, int32_t aCbSkip,
+           void *aCrData, int32_t aCrStride, int32_t aCrWidth, int32_t aCrHeight, int32_t aCrOffset, int32_t aCrSkip)
+  {
+    mTimeUs = aTimeUs;
+    mKeyFrame = aKeyFrame;
+    mData = aData;
+    mSize = aSize;
+    mStride = aStride;
+    mSliceHeight = aSliceHeight;
+    mRotation = aRotation;
+    mGraphicBuffer = nullptr;
+    Y.mData = aYData;
+    Y.mStride = aYStride;
+    Y.mWidth = aYWidth;
+    Y.mHeight = aYHeight;
+    Y.mOffset = aYOffset;
+    Y.mSkip = aYSkip;
+    Cb.mData = aCbData;
+    Cb.mStride = aCbStride;
+    Cb.mWidth = aCbWidth;
+    Cb.mHeight = aCbHeight;
+    Cb.mOffset = aCbOffset;
+    Cb.mSkip = aCbSkip;
+    Cr.mData = aCrData;
+    Cr.mStride = aCrStride;
+    Cr.mWidth = aCrWidth;
+    Cr.mHeight = aCrHeight;
+    Cr.mOffset = aCrOffset;
+    Cr.mSkip = aCrSkip;
+  }
+};
+
+struct AudioFrame {
+  int64_t mTimeUs;
+  void *mData; // 16-bit PCM, interleaved
+  size_t mSize; // Size of mData in bytes
+  int32_t mAudioChannels;
+  int32_t mAudioSampleRate;
+
+  AudioFrame() :
+    mTimeUs(0),
+    mData(0),
+    mSize(0),
+    mAudioChannels(0),
+    mAudioSampleRate(0)
+  {
+  }
+
+  void Set(int64_t aTimeUs,
+           void *aData, size_t aSize,
+           int32_t aAudioChannels, int32_t aAudioSampleRate)
+  {
+    mTimeUs = aTimeUs;
+    mData = aData;
+    mSize = aSize;
+    mAudioChannels = aAudioChannels;
+    mAudioSampleRate = aAudioSampleRate;
+  }
+};
+
+struct Decoder;
+
+struct PluginHost {
+  bool (*Read)(Decoder *aDecoder, char *aBuffer, int64_t aOffset, uint32_t aCount, uint32_t* aBytes);
+  uint64_t (*GetLength)(Decoder *aDecoder);
+  void (*SetMetaDataReadMode)(Decoder *aDecoder);
+  void (*SetPlaybackReadMode)(Decoder *aDecoder);
+};
+
+struct Decoder {
+  void *mResource;
+  void *mPrivate;
+
+  Decoder();
+
+  void (*GetDuration)(Decoder *aDecoder, int64_t *durationUs);
+  void (*GetVideoParameters)(Decoder *aDecoder, int32_t *aWidth, int32_t *aHeight);
+  void (*GetAudioParameters)(Decoder *aDecoder, int32_t *aNumChannels, int32_t *aSampleRate);
+  bool (*HasVideo)(Decoder *aDecoder);
+  bool (*HasAudio)(Decoder *aDecoder);
+  bool (*ReadVideo)(Decoder *aDecoder, VideoFrame *aFrame, int64_t aSeekTimeUs);
+  bool (*ReadAudio)(Decoder *aDecoder, AudioFrame *aFrame, int64_t aSeekTimeUs);
+  void (*DestroyDecoder)(Decoder *);
+};
+
+struct Manifest {
+  bool (*CanDecode)(const char *aMimeChars, size_t aMimeLen, const char* const**aCodecs);
+  bool (*CreateDecoder)(PluginHost *aPluginHost, Decoder *aDecoder,
+                        const char *aMimeChars, size_t aMimeLen);
+};
+
+}
+
+#endif
new file mode 100644
--- /dev/null
+++ b/content/media/omx/Makefile.in
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DEPTH		= @DEPTH@
+topsrcdir	= @top_srcdir@
+srcdir		= @srcdir@
+VPATH		= @srcdir@
+
+include $(DEPTH)/config/autoconf.mk
+
+MODULE		= content
+LIBRARY_NAME	= gkconomx_s
+LIBXUL_LIBRARY 	= 1
+
+EXPORTS		+= \
+		nsMediaOmxDecoder.h \
+		$(NULL)
+
+CPPSRCS		= \
+		nsMediaOmxDecoder.cpp \
+		nsMediaOmxReader.cpp \
+		OmxDecoder.cpp \
+		$(NULL)
+
+FORCE_STATIC_LIB = 1
+
+include $(topsrcdir)/config/rules.mk
+include $(topsrcdir)/ipc/chromium/chromium-config.mk
+
+INCLUDES	+= \
+		-I$(topsrcdir)/ipc/chromium/src \
+		-I$(srcdir)/../../base/src \
+		-I$(srcdir)/../../html/content/src \
+		-I$(ANDROID_SOURCE)/dalvik/libnativehelper/include/nativehelper \
+		-I$(ANDROID_SOURCE)/frameworks/base/include/ \
+		-I$(ANDROID_SOURCE)/frameworks/base/include/binder/ \
+		-I$(ANDROID_SOURCE)/frameworks/base/include/utils/ \
+		-I$(ANDROID_SOURCE)/frameworks/base/include/media/ \
+		-I$(ANDROID_SOURCE)/frameworks/base/include/media/stagefright/openmax \
+		-I$(ANDROID_SOURCE)/frameworks/base/media/libstagefright/include/ \
+		$(NULL)
new file mode 100644
--- /dev/null
+++ b/content/media/omx/OmxDecoder.cpp
@@ -0,0 +1,570 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#include <unistd.h>
+#include <fcntl.h>
+
+#include "base/basictypes.h"
+#include <stagefright/DataSource.h>
+#include <stagefright/MediaExtractor.h>
+#include <stagefright/MetaData.h>
+#include <stagefright/OMXCodec.h>
+#include <OMX.h>
+
+#include "mozilla/Preferences.h"
+#include "mozilla/Types.h"
+#include "MPAPI.h"
+#include "prlog.h"
+
+#include "GonkNativeWindow.h"
+#include "OmxDecoder.h"
+
+#ifdef PR_LOGGING
+PRLogModuleInfo *gOmxDecoderLog;
+#define LOG(type, msg...) PR_LOG(gOmxDecoderLog, type, (msg))
+#else
+#define LOG(x...)
+#endif
+
+using namespace MPAPI;
+
+namespace mozilla {
+namespace layers {
+
+VideoGraphicBuffer::VideoGraphicBuffer(android::MediaBuffer *aBuffer,
+                                       SurfaceDescriptor *aDescriptor)
+  : GraphicBufferLocked(*aDescriptor),
+    mMediaBuffer(aBuffer)
+{
+  mMediaBuffer->add_ref();
+}
+
+VideoGraphicBuffer::~VideoGraphicBuffer()
+{
+  if (mMediaBuffer) {
+    mMediaBuffer->release();
+  }
+}
+
+void
+VideoGraphicBuffer::Unlock()
+{
+  if (mMediaBuffer) {
+    mMediaBuffer->release();
+    mMediaBuffer = nullptr;
+  }
+}
+
+}
+}
+
+namespace android {
+
+MediaStreamSource::MediaStreamSource(MediaResource *aResource,
+                                     nsBuiltinDecoder *aDecoder) :
+  mDecoder(aDecoder), mResource(aResource)
+{
+}
+
+MediaStreamSource::~MediaStreamSource()
+{
+}
+
+status_t MediaStreamSource::initCheck() const
+{
+  return OK;
+}
+
+ssize_t MediaStreamSource::readAt(off64_t offset, void *data, size_t size)
+{
+  char *ptr = static_cast<char *>(data);
+  size_t todo = size;
+  while (todo > 0) {
+    uint32_t bytesRead;
+    if ((offset != mResource->Tell() &&
+         NS_FAILED(mResource->Seek(nsISeekableStream::NS_SEEK_SET, offset))) ||
+        NS_FAILED(mResource->Read(ptr, todo, &bytesRead))) {
+      return ERROR_IO;
+    }
+    offset += bytesRead;
+    todo -= bytesRead;
+    ptr += bytesRead;
+  }
+  return size;
+}
+
+status_t MediaStreamSource::getSize(off64_t *size)
+{
+  uint64_t length = mResource->GetLength();
+  if (length == static_cast<uint64_t>(-1))
+    return ERROR_UNSUPPORTED;
+
+  *size = length;
+
+  return OK;
+}
+
+}  // namespace android
+
+using namespace android;
+
+OmxDecoder::OmxDecoder(MediaResource *aResource,
+                       nsBuiltinDecoder *aDecoder) :
+  mResource(aResource),
+  mDecoder(aDecoder),
+  mVideoWidth(0),
+  mVideoHeight(0),
+  mVideoColorFormat(0),
+  mVideoStride(0),
+  mVideoSliceHeight(0),
+  mVideoRotation(0),
+  mAudioChannels(-1),
+  mAudioSampleRate(-1),
+  mDurationUs(-1),
+  mVideoBuffer(nullptr),
+  mAudioBuffer(nullptr),
+  mAudioMetadataRead(false)
+{
+}
+
+OmxDecoder::~OmxDecoder()
+{
+  ReleaseVideoBuffer();
+  ReleaseAudioBuffer();
+
+  if (mVideoSource.get()) {
+    mVideoSource->stop();
+  }
+
+  if (mAudioSource.get()) {
+    mAudioSource->stop();
+  }
+}
+
+class AutoStopMediaSource {
+  sp<MediaSource> mMediaSource;
+public:
+  AutoStopMediaSource(const sp<MediaSource>& aMediaSource) : mMediaSource(aMediaSource) {
+  }
+
+  ~AutoStopMediaSource() {
+    mMediaSource->stop();
+  }
+};
+
+static sp<IOMX> sOMX = nullptr;
+static sp<IOMX> GetOMX() {
+  if (sOMX.get() == nullptr) {
+    sOMX = new OMX;
+  }
+  return sOMX;
+}
+
+bool OmxDecoder::Init() {
+#ifdef PR_LOGGING
+  if (!gOmxDecoderLog) {
+    gOmxDecoderLog = PR_NewLogModule("OmxDecoder");
+  }
+#endif
+
+  // Register sniffers, if they are not registered in this process.
+  DataSource::RegisterDefaultSniffers();
+
+  sp<DataSource> dataSource = new MediaStreamSource(mResource, mDecoder);
+  if (dataSource->initCheck()) {
+    NS_WARNING("Initializing DataSource for OMX decoder failed");
+    return false;
+  }
+
+  mResource->SetReadMode(nsMediaCacheStream::MODE_METADATA);
+
+  sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
+  if (extractor == nullptr) {
+    NS_WARNING("Could not create MediaExtractor");
+    return false;
+  }
+
+  ssize_t audioTrackIndex = -1;
+  ssize_t videoTrackIndex = -1;
+  const char *audioMime = nullptr;
+
+  for (size_t i = 0; i < extractor->countTracks(); ++i) {
+    sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+    int32_t bitRate;
+    if (!meta->findInt32(kKeyBitRate, &bitRate))
+      bitRate = 0;
+
+    const char *mime;
+    if (!meta->findCString(kKeyMIMEType, &mime)) {
+      continue;
+    }
+
+    if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
+      videoTrackIndex = i;
+    } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
+      audioTrackIndex = i;
+      audioMime = mime;
+    }
+  }
+
+  if (videoTrackIndex == -1 && audioTrackIndex == -1) {
+    NS_WARNING("OMX decoder could not find video or audio tracks");
+    return false;
+  }
+
+  mResource->SetReadMode(nsMediaCacheStream::MODE_PLAYBACK);
+
+  int64_t totalDurationUs = 0;
+
+  mNativeWindow = new GonkNativeWindow();
+
+  sp<MediaSource> videoTrack;
+  sp<MediaSource> videoSource;
+  if (videoTrackIndex != -1 && (videoTrack = extractor->getTrack(videoTrackIndex)) != nullptr) {
+    int flags = 0; // prefer hw codecs
+
+    if (mozilla::Preferences::GetBool("media.omx.prefer_software_codecs", false)) {
+      flags |= kPreferSoftwareCodecs;
+    }
+
+    videoSource = OMXCodec::Create(GetOMX(),
+                                   videoTrack->getFormat(),
+                                   false, // decoder
+                                   videoTrack,
+                                   nullptr,
+                                   flags,
+                                   mNativeWindow);
+    if (videoSource == nullptr) {
+      NS_WARNING("Couldn't create OMX video source");
+      return false;
+    }
+
+    if (videoSource->start() != OK) {
+      NS_WARNING("Couldn't start OMX video source");
+      return false;
+    }
+
+    int64_t durationUs;
+    if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
+      if (durationUs > totalDurationUs)
+        totalDurationUs = durationUs;
+    }
+  }
+
+  sp<MediaSource> audioTrack;
+  sp<MediaSource> audioSource;
+  if (audioTrackIndex != -1 && (audioTrack = extractor->getTrack(audioTrackIndex)) != nullptr)
+  {
+    if (!strcasecmp(audioMime, "audio/raw")) {
+      audioSource = audioTrack;
+    } else {
+      audioSource = OMXCodec::Create(GetOMX(),
+                                     audioTrack->getFormat(),
+                                     false, // decoder
+                                     audioTrack);
+    }
+    if (audioSource == nullptr) {
+      NS_WARNING("Couldn't create OMX audio source");
+      return false;
+    }
+    if (audioSource->start() != OK) {
+      NS_WARNING("Couldn't start OMX audio source");
+      return false;
+    }
+
+    int64_t durationUs;
+    if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
+      if (durationUs > totalDurationUs)
+        totalDurationUs = durationUs;
+    }
+  }
+
+  // set decoder state
+  mVideoTrack = videoTrack;
+  mVideoSource = videoSource;
+  mAudioTrack = audioTrack;
+  mAudioSource = audioSource;
+  mDurationUs = totalDurationUs;
+
+  if (mVideoSource.get() && !SetVideoFormat()) {
+    NS_WARNING("Couldn't set OMX video format");
+    return false;
+  }
+
+  // To reliably get the channel and sample rate data we need to read from the
+  // audio source until we get an INFO_FORMAT_CHANGED status
+  if (mAudioSource.get()) {
+    if (mAudioSource->read(&mAudioBuffer) != INFO_FORMAT_CHANGED) {
+      sp<MetaData> meta = mAudioSource->getFormat();
+      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
+          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
+        NS_WARNING("Couldn't get audio metadata from OMX decoder");
+        return false;
+      }
+      mAudioMetadataRead = true;
+    }
+    else if (!SetAudioFormat()) {
+      NS_WARNING("Couldn't set audio format");
+      return false;
+    }
+  }
+
+  return true;
+}
+
+bool OmxDecoder::SetVideoFormat() {
+  const char *componentName;
+
+  if (!mVideoSource->getFormat()->findInt32(kKeyWidth, &mVideoWidth) ||
+      !mVideoSource->getFormat()->findInt32(kKeyHeight, &mVideoHeight) ||
+      !mVideoSource->getFormat()->findCString(kKeyDecoderComponent, &componentName) ||
+      !mVideoSource->getFormat()->findInt32(kKeyColorFormat, &mVideoColorFormat) ) {
+    return false;
+  }
+
+  if (!mVideoSource->getFormat()->findInt32(kKeyStride, &mVideoStride)) {
+    mVideoStride = mVideoWidth;
+    NS_WARNING("stride not available, assuming width");
+  }
+
+  if (!mVideoSource->getFormat()->findInt32(kKeySliceHeight, &mVideoSliceHeight)) {
+    mVideoSliceHeight = mVideoHeight;
+    NS_WARNING("slice height not available, assuming height");
+  }
+
+  if (!mVideoSource->getFormat()->findInt32(kKeyRotation, &mVideoRotation)) {
+    mVideoRotation = 0;
+    NS_WARNING("rotation not available, assuming 0");
+  }
+
+  LOG(PR_LOG_DEBUG, "width: %d height: %d component: %s format: %d stride: %d sliceHeight: %d rotation: %d",
+      mVideoWidth, mVideoHeight, componentName, mVideoColorFormat,
+      mVideoStride, mVideoSliceHeight, mVideoRotation);
+
+  return true;
+}
+
+bool OmxDecoder::SetAudioFormat() {
+  // If the format changed, update our cached info.
+  if (!mAudioSource->getFormat()->findInt32(kKeyChannelCount, &mAudioChannels) ||
+      !mAudioSource->getFormat()->findInt32(kKeySampleRate, &mAudioSampleRate)) {
+    return false;
+  }
+
+  LOG(PR_LOG_DEBUG, "channelCount: %d sampleRate: %d",
+      mAudioChannels, mAudioSampleRate);
+
+  return true;
+}
+
+void OmxDecoder::ReleaseVideoBuffer() {
+  if (mVideoBuffer) {
+    mVideoBuffer->release();
+    mVideoBuffer = nullptr;
+  }
+}
+
+void OmxDecoder::ReleaseAudioBuffer() {
+  if (mAudioBuffer) {
+    mAudioBuffer->release();
+    mAudioBuffer = nullptr;
+  }
+}
+
+void OmxDecoder::PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+  void *y = aData;
+  void *u = static_cast<uint8_t *>(y) + mVideoStride * mVideoSliceHeight;
+  void *v = static_cast<uint8_t *>(u) + mVideoStride/2 * mVideoSliceHeight/2;
+
+  aFrame->Set(aTimeUs, aKeyFrame,
+              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
+              y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
+              u, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0,
+              v, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0);
+}
+
+void OmxDecoder::CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+  aFrame->Set(aTimeUs, aKeyFrame,
+              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
+              aData, mVideoStride, mVideoWidth, mVideoHeight, 1, 1,
+              aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 3,
+              aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 2, 3);
+}
+
+void OmxDecoder::SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+  void *y = aData;
+  void *uv = static_cast<uint8_t *>(y) + (mVideoStride * mVideoSliceHeight);
+
+  aFrame->Set(aTimeUs, aKeyFrame,
+              aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
+              y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
+              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1,
+              uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1);
+}
+
+void OmxDecoder::SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+  SemiPlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+  aFrame->Cb.mOffset = 1;
+  aFrame->Cr.mOffset = 0;
+}
+
+bool OmxDecoder::ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+  const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
+
+  aFrame->mGraphicBuffer = nullptr;
+
+  switch (mVideoColorFormat) {
+  case OMX_COLOR_FormatYUV420Planar:
+    PlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+    break;
+  case OMX_COLOR_FormatCbYCrY:
+    CbYCrYFrame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+    break;
+  case OMX_COLOR_FormatYUV420SemiPlanar:
+    SemiPlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+    break;
+  case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+    SemiPlanarYVU420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+    break;
+  default:
+    LOG(PR_LOG_DEBUG, "Unknown video color format %08x", mVideoColorFormat);
+    return false;
+  }
+  return true;
+}
+
+bool OmxDecoder::ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize, int32_t aAudioChannels, int32_t aAudioSampleRate)
+{
+  aFrame->Set(aTimeUs, static_cast<char *>(aData) + aDataOffset, aSize, aAudioChannels, aAudioSampleRate);
+  return true;
+}
+
+bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aTimeUs,
+                           bool aKeyframeSkip, bool aDoSeek)
+{
+  if (!mVideoSource.get())
+    return false;
+
+  ReleaseVideoBuffer();
+
+  status_t err;
+
+  if (aDoSeek || aKeyframeSkip) {
+    MediaSource::ReadOptions options;
+    options.setSeekTo(aTimeUs, aDoSeek ? MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC :
+                                         MediaSource::ReadOptions::SEEK_NEXT_SYNC);
+    err = mVideoSource->read(&mVideoBuffer, &options);
+  } else {
+    err = mVideoSource->read(&mVideoBuffer);
+  }
+
+  if (err == OK && mVideoBuffer->range_length() > 0) {
+    int64_t timeUs;
+    int64_t durationUs = 0;
+    int32_t unreadable;
+    int32_t keyFrame;
+
+    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
+      NS_WARNING("OMX decoder did not return frame time");
+      return false;
+    }
+
+    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
+      keyFrame = 0;
+    }
+
+    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
+      unreadable = 0;
+    }
+
+    mozilla::layers::SurfaceDescriptor *descriptor = nullptr;
+    if ((mVideoBuffer->graphicBuffer().get())) {
+      descriptor = mNativeWindow->getSurfaceDescriptorFromBuffer(mVideoBuffer->graphicBuffer().get());
+    }
+
+    if (descriptor) {
+      aFrame->mGraphicBuffer = new mozilla::layers::VideoGraphicBuffer(mVideoBuffer, descriptor);
+      aFrame->mRotation = mVideoRotation;
+      aFrame->mTimeUs = timeUs;
+      aFrame->mEndTimeUs = timeUs + durationUs;
+      aFrame->mKeyFrame = keyFrame;
+      aFrame->Y.mWidth = mVideoWidth;
+      aFrame->Y.mHeight = mVideoHeight;
+    } else {
+      char *data = static_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
+      size_t length = mVideoBuffer->range_length();
+
+      if (unreadable) {
+        LOG(PR_LOG_DEBUG, "video frame is unreadable");
+      }
+
+      if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
+        return false;
+      }
+
+      aFrame->mEndTimeUs = timeUs + durationUs;
+    }
+
+  }
+  else if (err == INFO_FORMAT_CHANGED) {
+    // If the format changed, update our cached info.
+    if (!SetVideoFormat()) {
+      return false;
+    } else {
+      return ReadVideo(aFrame, aTimeUs, aKeyframeSkip, aDoSeek);
+    }
+  }
+  else if (err == ERROR_END_OF_STREAM) {
+    return false;
+  }
+
+  return true;
+}
+
+bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
+{
+  status_t err;
+
+  if (mAudioMetadataRead && aSeekTimeUs == -1) {
+    // Use the data read into the buffer during metadata time
+    err = OK;
+  }
+  else {
+    ReleaseAudioBuffer();
+    if (aSeekTimeUs != -1) {
+      MediaSource::ReadOptions options;
+      options.setSeekTo(aSeekTimeUs);
+      err = mAudioSource->read(&mAudioBuffer, &options);
+    } else {
+      err = mAudioSource->read(&mAudioBuffer);
+    }
+  }
+  mAudioMetadataRead = false;
+
+  aSeekTimeUs = -1;
+
+  if (err == OK && mAudioBuffer->range_length() != 0) {
+    int64_t timeUs;
+    if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs))
+      return false;
+
+    return ToAudioFrame(aFrame, timeUs,
+                        mAudioBuffer->data(),
+                        mAudioBuffer->range_offset(),
+                        mAudioBuffer->range_length(),
+                        mAudioChannels, mAudioSampleRate);
+  }
+  else if (err == INFO_FORMAT_CHANGED) {
+    // If the format changed, update our cached info.
+    if (!SetAudioFormat()) {
+      return false;
+    } else {
+      return ReadAudio(aFrame, aSeekTimeUs);
+    }
+  }
+
+  return true;
+}
new file mode 100644
--- /dev/null
+++ b/content/media/omx/OmxDecoder.h
@@ -0,0 +1,151 @@
+#include <OMX.h>
+#include <stagefright/MediaSource.h>
+#include <stagefright/DataSource.h>
+
+#include <utils/RefBase.h>
+
+#include "GonkNativeWindow.h"
+#include "GonkIOSurfaceImage.h"
+#include "MPAPI.h"
+#include "MediaResource.h"
+#include "nsBuiltinDecoder.h"
+
+namespace mozilla {
+namespace layers {
+
+class VideoGraphicBuffer : public GraphicBufferLocked {
+  // XXX change this to an actual smart pointer at some point
+  android::MediaBuffer *mMediaBuffer;
+  public:
+    VideoGraphicBuffer(android::MediaBuffer *aBuffer,
+                       SurfaceDescriptor *aDescriptor);
+    ~VideoGraphicBuffer();
+    void Unlock();
+};
+
+}
+}
+
+namespace android {
+
+// MediaStreamSource is a DataSource that reads from a MPAPI media stream.
+class MediaStreamSource : public DataSource {
+  typedef mozilla::MediaResource MediaResource;
+
+  MediaResource *mResource;
+  nsBuiltinDecoder *mDecoder;
+public:
+  MediaStreamSource(MediaResource *aResource,
+                    nsBuiltinDecoder *aDecoder);
+
+  virtual status_t initCheck() const;
+  virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+  virtual ssize_t readAt(off_t offset, void *data, size_t size) {
+    return readAt(static_cast<off64_t>(offset), data, size);
+  }
+  virtual status_t getSize(off_t *size) {
+    off64_t size64;
+    status_t status = getSize(&size64);
+    *size = size64;
+    return status;
+  }
+  virtual status_t getSize(off64_t *size);
+  virtual uint32_t flags() {
+    return kWantsPrefetching;
+  }
+
+  virtual ~MediaStreamSource();
+
+private:
+  MediaStreamSource(const MediaStreamSource &);
+  MediaStreamSource &operator=(const MediaStreamSource &);
+};
+
+class OmxDecoder {
+  typedef MPAPI::AudioFrame AudioFrame;
+  typedef MPAPI::VideoFrame VideoFrame;
+  typedef mozilla::MediaResource MediaResource;
+
+  enum {
+    kPreferSoftwareCodecs = 1
+  };
+
+  nsBuiltinDecoder *mDecoder;
+  MediaResource *mResource;
+  sp<GonkNativeWindow> mNativeWindow;
+  sp<MediaSource> mVideoTrack;
+  sp<MediaSource> mVideoSource;
+  sp<MediaSource> mAudioTrack;
+  sp<MediaSource> mAudioSource;
+  int32_t mVideoWidth;
+  int32_t mVideoHeight;
+  int32_t mVideoColorFormat;
+  int32_t mVideoStride;
+  int32_t mVideoSliceHeight;
+  int32_t mVideoRotation;
+  int32_t mAudioChannels;
+  int32_t mAudioSampleRate;
+  int64_t mDurationUs;
+  VideoFrame mVideoFrame;
+  AudioFrame mAudioFrame;
+
+  // Lifetime of these should be handled by OMXCodec, as long as we release
+  //   them after use: see ReleaseVideoBuffer(), ReleaseAudioBuffer()
+  MediaBuffer *mVideoBuffer;
+  MediaBuffer *mAudioBuffer;
+
+  // 'true' if a read from the audio stream was done while reading the metadata
+  bool mAudioMetadataRead;
+
+  void ReleaseVideoBuffer();
+  void ReleaseAudioBuffer();
+
+  void PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  void CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  void SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  void SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
+                    int32_t aAudioChannels, int32_t aAudioSampleRate);
+public:
+  OmxDecoder(MediaResource *aResource, nsBuiltinDecoder *aDecoder);
+  ~OmxDecoder();
+
+  bool Init();
+  bool SetVideoFormat();
+  bool SetAudioFormat();
+
+  void GetDuration(int64_t *durationUs) {
+    *durationUs = mDurationUs;
+  }
+
+  void GetVideoParameters(int32_t *width, int32_t *height) {
+    *width = mVideoWidth;
+    *height = mVideoHeight;
+  }
+
+  void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
+    *numChannels = mAudioChannels;
+    *sampleRate = mAudioSampleRate;
+  }
+
+  bool HasVideo() {
+    return mVideoSource != nullptr;
+  }
+
+  bool HasAudio() {
+    return mAudioSource != nullptr;
+  }
+
+  bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs, 
+                 bool aKeyframeSkip = false,
+                 bool aDoSeek = false);
+  bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);
+
+  MediaResource *GetResource() {
+    return mResource;
+  }
+};
+
+}
+
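
A hedged usage sketch of the OmxDecoder class declared above; this mirrors how nsMediaOmxReader drives it later in the patch. The resource and decoder pointers are assumed to be supplied by the caller, and error handling is elided:

#include "OmxDecoder.h"

static void SketchDecodeVideo(mozilla::MediaResource *aResource,
                              nsBuiltinDecoder *aDecoder)
{
  android::OmxDecoder omx(aResource, aDecoder);
  if (!omx.Init() || !omx.HasVideo()) {
    return;
  }

  int64_t durationUs = 0;
  omx.GetDuration(&durationUs);

  MPAPI::VideoFrame frame;
  // Decode from the start of the stream; pass a target time with aDoSeek=true
  // (or aKeyframeSkip=true) to seek or skip to the next keyframe instead.
  while (omx.ReadVideo(&frame, 0)) {
    // frame.mGraphicBuffer is non-null on the zero-copy (hardware) path;
    // otherwise frame.Y/Cb/Cr describe a software YUV buffer.
  }
}
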
new file mode 100644
--- /dev/null
+++ b/content/media/omx/nsMediaOmxDecoder.cpp
@@ -0,0 +1,28 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "nsMediaOmxDecoder.h"
+#include "nsMediaOmxReader.h"
+#include "nsBuiltinDecoderStateMachine.h"
+
+nsMediaOmxDecoder::nsMediaOmxDecoder() :
+  nsBuiltinDecoder()
+{
+}
+
+nsMediaDecoder* nsMediaOmxDecoder::Clone()
+{
+  return new nsMediaOmxDecoder();
+}
+
+nsDecoderStateMachine* nsMediaOmxDecoder::CreateStateMachine()
+{
+  return new nsBuiltinDecoderStateMachine(this, new nsMediaOmxReader(this));
+}
+
+nsMediaOmxDecoder::~nsMediaOmxDecoder()
+{
+}
new file mode 100644
--- /dev/null
+++ b/content/media/omx/nsMediaOmxDecoder.h
@@ -0,0 +1,22 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(nsMediaOmxDecoder_h_)
+#define nsMediaOmxDecoder_h_
+
+#include "base/basictypes.h"
+#include "nsBuiltinDecoder.h"
+
+class nsMediaOmxDecoder : public nsBuiltinDecoder
+{
+public:
+  nsMediaOmxDecoder();
+  ~nsMediaOmxDecoder();
+
+  virtual nsMediaDecoder* Clone();
+  virtual nsDecoderStateMachine* CreateStateMachine();
+};
+
+#endif
new file mode 100644
--- /dev/null
+++ b/content/media/omx/nsMediaOmxReader.cpp
@@ -0,0 +1,355 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "nsMediaOmxReader.h"
+
+#include "mozilla/TimeStamp.h"
+#include "nsTimeRanges.h"
+#include "MediaResource.h"
+#include "VideoUtils.h"
+#include "nsMediaOmxDecoder.h"
+
+using namespace android;
+using namespace mozilla;
+
+nsMediaOmxReader::nsMediaOmxReader(nsBuiltinDecoder *aDecoder) :
+  nsBuiltinDecoderReader(aDecoder),
+  mOmxDecoder(nullptr),
+  mHasVideo(false),
+  mHasAudio(false),
+  mVideoSeekTimeUs(-1),
+  mAudioSeekTimeUs(-1),
+  mLastVideoFrame(nullptr)
+{
+}
+
+nsMediaOmxReader::~nsMediaOmxReader()
+{
+  ResetDecode();
+}
+
+nsresult nsMediaOmxReader::Init(nsBuiltinDecoderReader* aCloneDonor)
+{
+  return NS_OK;
+}
+
+nsresult nsMediaOmxReader::ReadMetadata(nsVideoInfo* aInfo,
+                                        nsHTMLMediaElement::MetadataTags** aTags)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+
+  *aTags = nullptr;
+
+  if (!mOmxDecoder) {
+    mOmxDecoder = new OmxDecoder(mDecoder->GetResource(), mDecoder);
+    mOmxDecoder->Init();
+  }
+
+  // Set the total duration (the max of the audio and video track).
+  int64_t durationUs;
+  mOmxDecoder->GetDuration(&durationUs);
+  if (durationUs) {
+    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+    mDecoder->GetStateMachine()->SetDuration(durationUs);
+  }
+
+  if (mOmxDecoder->HasVideo()) {
+    int32_t width, height;
+    mOmxDecoder->GetVideoParameters(&width, &height);
+    nsIntRect pictureRect(0, 0, width, height);
+
+    // Validate the container-reported frame and pictureRect sizes. This ensures
+    // that our video frame creation code doesn't overflow.
+    nsIntSize displaySize(width, height);
+    nsIntSize frameSize(width, height);
+    if (!nsVideoInfo::ValidateVideoRegion(frameSize, pictureRect, displaySize)) {
+      return NS_ERROR_FAILURE;
+    }
+
+    // Video track's frame sizes will not overflow. Activate the video track.
+    mHasVideo = mInfo.mHasVideo = true;
+    mInfo.mDisplay = displaySize;
+    mPicture = pictureRect;
+    mInitialFrame = frameSize;
+    VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
+    if (container) {
+      container->SetCurrentFrame(gfxIntSize(displaySize.width, displaySize.height),
+                                 nullptr,
+                                 mozilla::TimeStamp::Now());
+    }
+  }
+
+  if (mOmxDecoder->HasAudio()) {
+    int32_t numChannels, sampleRate;
+    mOmxDecoder->GetAudioParameters(&numChannels, &sampleRate);
+    mHasAudio = mInfo.mHasAudio = true;
+    mInfo.mAudioChannels = numChannels;
+    mInfo.mAudioRate = sampleRate;
+  }
+
+  *aInfo = mInfo;
+
+  return NS_OK;
+}
+
+// Resets all state related to decoding, emptying all buffers etc.
+nsresult nsMediaOmxReader::ResetDecode()
+{
+  if (mLastVideoFrame) {
+    delete mLastVideoFrame;
+    mLastVideoFrame = nullptr;
+  }
+  if (mOmxDecoder) {
+    delete mOmxDecoder;
+    mOmxDecoder = nullptr;
+  }
+  return NS_OK;
+}
+
+bool nsMediaOmxReader::DecodeVideoFrame(bool &aKeyframeSkip,
+                                        int64_t aTimeThreshold)
+{
+  // Record number of frames decoded and parsed. Automatically update the
+  // stats counters using the AutoNotifyDecoded stack-based class.
+  uint32_t parsed = 0, decoded = 0;
+  nsMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
+
+  // Throw away the currently buffered frame if we are seeking.
+  if (mLastVideoFrame && mVideoSeekTimeUs != -1) {
+    delete mLastVideoFrame;
+    mLastVideoFrame = nullptr;
+  }
+
+  bool doSeek = mVideoSeekTimeUs != -1;
+  if (doSeek) {
+    aTimeThreshold = mVideoSeekTimeUs;
+  }
+
+  // Read next frame
+  while (true) {
+    MPAPI::VideoFrame frame;
+    frame.mGraphicBuffer = nullptr;
+    if (!mOmxDecoder->ReadVideo(&frame, aTimeThreshold, aKeyframeSkip, doSeek)) {
+      // We reached the end of the video stream. If we have a buffered
+      // video frame, push it onto the video queue using the total duration
+      // of the video as the end time.
+      if (mLastVideoFrame) {
+        int64_t durationUs;
+        mOmxDecoder->GetDuration(&durationUs);
+        mLastVideoFrame->mEndTime = (durationUs > mLastVideoFrame->mTime)
+                                  ? durationUs
+                                  : mLastVideoFrame->mTime;
+        mVideoQueue.Push(mLastVideoFrame);
+        mLastVideoFrame = nullptr;
+      }
+      mVideoQueue.Finish();
+      return false;
+    }
+
+    mVideoSeekTimeUs = -1;
+    doSeek = aKeyframeSkip = false;
+
+    nsIntRect picture = mPicture;
+    if (frame.Y.mWidth != mInitialFrame.width ||
+        frame.Y.mHeight != mInitialFrame.height) {
+
+      // Frame size is different from what the container reports. This is legal,
+      // and we will preserve the ratio of the crop rectangle as it
+      // was reported relative to the picture size reported by the container.
+      picture.x = (mPicture.x * frame.Y.mWidth) / mInitialFrame.width;
+      picture.y = (mPicture.y * frame.Y.mHeight) / mInitialFrame.height;
+      picture.width = (frame.Y.mWidth * mPicture.width) / mInitialFrame.width;
+      picture.height = (frame.Y.mHeight * mPicture.height) / mInitialFrame.height;
+    }
+
+    // This is the approximate byte position in the stream.
+    int64_t pos = mDecoder->GetResource()->Tell();
+
+    VideoData *v;
+    if (!frame.mGraphicBuffer) {
+
+      VideoData::YCbCrBuffer b;
+      b.mPlanes[0].mData = static_cast<uint8_t *>(frame.Y.mData);
+      b.mPlanes[0].mStride = frame.Y.mStride;
+      b.mPlanes[0].mHeight = frame.Y.mHeight;
+      b.mPlanes[0].mWidth = frame.Y.mWidth;
+      b.mPlanes[0].mOffset = frame.Y.mOffset;
+      b.mPlanes[0].mSkip = frame.Y.mSkip;
+
+      b.mPlanes[1].mData = static_cast<uint8_t *>(frame.Cb.mData);
+      b.mPlanes[1].mStride = frame.Cb.mStride;
+      b.mPlanes[1].mHeight = frame.Cb.mHeight;
+      b.mPlanes[1].mWidth = frame.Cb.mWidth;
+      b.mPlanes[1].mOffset = frame.Cb.mOffset;
+      b.mPlanes[1].mSkip = frame.Cb.mSkip;
+
+      b.mPlanes[2].mData = static_cast<uint8_t *>(frame.Cr.mData);
+      b.mPlanes[2].mStride = frame.Cr.mStride;
+      b.mPlanes[2].mHeight = frame.Cr.mHeight;
+      b.mPlanes[2].mWidth = frame.Cr.mWidth;
+      b.mPlanes[2].mOffset = frame.Cr.mOffset;
+      b.mPlanes[2].mSkip = frame.Cr.mSkip;
+
+      v = VideoData::Create(mInfo,
+                            mDecoder->GetImageContainer(),
+                            pos,
+                            frame.mTimeUs,
+                            frame.mTimeUs+1, // We don't know the end time.
+                            b,
+                            frame.mKeyFrame,
+                            -1,
+                            picture);
+    } else {
+      v = VideoData::Create(mInfo,
+                            mDecoder->GetImageContainer(),
+                            pos,
+                            frame.mTimeUs,
+                            frame.mTimeUs+1, // We don't know the end time.
+                            frame.mGraphicBuffer,
+                            frame.mKeyFrame,
+                            -1,
+                            picture);
+    }
+
+    if (!v) {
+      NS_WARNING("Unable to create VideoData");
+      return false;
+    }
+
+    parsed++;
+    decoded++;
+    NS_ASSERTION(decoded <= parsed, "Expect to decode fewer frames than parsed in OMX decoder...");
+
+    // Seeking hack
+    if (mLastVideoFrame && mLastVideoFrame->mTime > v->mTime) {
+      delete mLastVideoFrame;
+      mLastVideoFrame = v;
+      continue;
+    }
+
+    // Since MPAPI doesn't give us the end time of frames, we keep one frame
+    // buffered in nsMediaOmxReader and push it into the queue as soon as
+    // we read the following frame, so we can use that frame's start time as
+    // the end time of the buffered frame.
+    if (!mLastVideoFrame) {
+      mLastVideoFrame = v;
+      continue;
+    }
+
+    mLastVideoFrame->mEndTime = v->mTime;
+
+    mVideoQueue.Push(mLastVideoFrame);
+
+    // Buffer the current frame we just decoded.
+    mLastVideoFrame = v;
+
+    break;
+  }
+
+  return true;
+}
+
+bool nsMediaOmxReader::DecodeAudioData()
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+
+  // This is the approximate byte position in the stream.
+  int64_t pos = mDecoder->GetResource()->Tell();
+
+  // Read next frame
+  MPAPI::AudioFrame frame;
+  if (!mOmxDecoder->ReadAudio(&frame, mAudioSeekTimeUs)) {
+    mAudioQueue.Finish();
+    return false;
+  }
+  mAudioSeekTimeUs = -1;
+
+  // Ignore empty buffers, which the stagefright media read will sporadically return
+  if (frame.mSize == 0) {
+    return true;
+  }
+
+  nsAutoArrayPtr<AudioDataValue> buffer(new AudioDataValue[frame.mSize/2] );
+  memcpy(buffer.get(), frame.mData, frame.mSize);
+
+  uint32_t frames = frame.mSize / (2 * frame.mAudioChannels);
+  CheckedInt64 duration = FramesToUsecs(frames, frame.mAudioSampleRate);
+  if (!duration.isValid()) {
+    return false;
+  }
+
+  mAudioQueue.Push(new AudioData(pos,
+                                 frame.mTimeUs,
+                                 duration.value(),
+                                 frames,
+                                 buffer.forget(),
+                                 frame.mAudioChannels));
+  return true;
+}
+
+nsresult nsMediaOmxReader::Seek(int64_t aTarget, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+
+  mVideoQueue.Reset();
+  mAudioQueue.Reset();
+
+  mAudioSeekTimeUs = mVideoSeekTimeUs = aTarget;
+
+  return DecodeToTarget(aTarget);
+}
+
+static uint64_t BytesToTime(int64_t offset, uint64_t length, uint64_t durationUs) {
+  double perc = double(offset) / double(length);
+  if (perc > 1.0)
+    perc = 1.0;
+  return uint64_t(double(durationUs) * perc);
+}
+
+nsresult nsMediaOmxReader::GetBuffered(nsTimeRanges* aBuffered, int64_t aStartTime)
+{
+  if (!mOmxDecoder)
+    return NS_OK;
+
+  MediaResource* stream = mOmxDecoder->GetResource();
+
+  int64_t durationUs = 0;
+  mOmxDecoder->GetDuration(&durationUs);
+
+  // Nothing to cache if the media takes 0us to play.
+  if (!durationUs)
+    return NS_OK;
+
+  // Special case completely cached files.  This also handles local files.
+  if (stream->IsDataCachedToEndOfResource(0)) {
+    aBuffered->Add(0, durationUs);
+    return NS_OK;
+  }
+
+  int64_t totalBytes = stream->GetLength();
+
+  // If we can't determine the total size, pretend that we have nothing
+  // buffered. This will put us in a state of eternally-low-on-undecoded-data
+  // which is not great, but about the best we can do.
+  if (totalBytes == -1)
+    return NS_OK;
+
+  int64_t startOffset = stream->GetNextCachedData(0);
+  while (startOffset >= 0) {
+    int64_t endOffset = stream->GetCachedDataEnd(startOffset);
+    // Bytes [startOffset..endOffset] are cached.
+    NS_ASSERTION(startOffset >= 0, "Integer underflow in GetBuffered");
+    NS_ASSERTION(endOffset >= 0, "Integer underflow in GetBuffered");
+
+    uint64_t startUs = BytesToTime(startOffset, totalBytes, durationUs);
+    uint64_t endUs = BytesToTime(endOffset, totalBytes, durationUs);
+    if (startUs != endUs) {
+      aBuffered->Add((double)startUs / USECS_PER_S, (double)endUs / USECS_PER_S);
+    }
+    startOffset = stream->GetNextCachedData(endOffset);
+  }
+  return NS_OK;
+}
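
GetBuffered() above approximates buffered time ranges by assuming a constant bitrate: each cached byte range is mapped to the proportional slice of the total duration. A small self-contained example of that mapping (the numbers are made up for illustration):

#include <stdint.h>
#include <stdio.h>

// Same linear mapping as BytesToTime() in nsMediaOmxReader.cpp above.
static uint64_t BytesToTime(int64_t offset, uint64_t length, uint64_t durationUs) {
  double perc = double(offset) / double(length);
  if (perc > 1.0)
    perc = 1.0;
  return uint64_t(double(durationUs) * perc);
}

int main() {
  // A 10,000,000-byte resource that plays for 20 s (20,000,000 us):
  // a cached byte range [2,500,000, 5,000,000) is reported as buffered
  // time [5 s, 10 s).
  printf("%llu..%llu us\n",
         (unsigned long long)BytesToTime(2500000, 10000000, 20000000),   // 5,000,000
         (unsigned long long)BytesToTime(5000000, 10000000, 20000000));  // 10,000,000
  return 0;
}
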
new file mode 100644
--- /dev/null
+++ b/content/media/omx/nsMediaOmxReader.h
@@ -0,0 +1,59 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(nsMediaOmxReader_h_)
+#define nsMediaOmxReader_h_
+
+#include "base/basictypes.h"
+#include "MediaResource.h"
+#include "nsBuiltinDecoder.h"
+#include "nsBuiltinDecoderReader.h"
+#include "OmxDecoder.h"
+
+#include "MPAPI.h"
+
+class nsMediaOmxReader : public nsBuiltinDecoderReader
+{
+  nsCString mType;
+  android::OmxDecoder *mOmxDecoder;
+  bool mHasVideo;
+  bool mHasAudio;
+  nsIntRect mPicture;
+  nsIntSize mInitialFrame;
+  int64_t mVideoSeekTimeUs;
+  int64_t mAudioSeekTimeUs;
+  VideoData *mLastVideoFrame;
+public:
+  nsMediaOmxReader(nsBuiltinDecoder* aDecoder);
+  ~nsMediaOmxReader();
+
+  virtual nsresult Init(nsBuiltinDecoderReader* aCloneDonor);
+  virtual nsresult ResetDecode();
+
+  virtual bool DecodeAudioData();
+  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
+                                int64_t aTimeThreshold);
+
+  virtual bool HasAudio()
+  {
+    return mHasAudio;
+  }
+
+  virtual bool HasVideo()
+  {
+    return mHasVideo;
+  }
+
+  virtual nsresult ReadMetadata(nsVideoInfo* aInfo,
+                                nsHTMLMediaElement::MetadataTags** aTags);
+  virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
+  virtual nsresult GetBuffered(nsTimeRanges* aBuffered, int64_t aStartTime);
+  virtual bool IsSeekableInBufferedRanges() {
+    return true;
+  }
+
+};
+
+#endif