Bug 1229363 - support sw video codec. r=sotaro
authorAlfredo Yang <ayang@mozilla.com>
Wed, 13 Jan 2016 18:53:00 +0100
changeset 280028 4be86232c89c4ef3d9d0c1f3b8a6f9c9b6c2b5a4
parent 280027 8a2d17f7f3d124477a9fdde11991a598cdeca821
child 280029 d83462d4d1486409701ba89cf86e5dfa62003b9e
push id29900
push usercbook@mozilla.com
push dateFri, 15 Jan 2016 10:47:20 +0000
treeherdermozilla-central@e1486d83107f [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewerssotaro
bugs1229363
milestone46.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1229363 - support sw video codec. r=sotaro
dom/media/platforms/omx/GonkOmxPlatformLayer.cpp
dom/media/platforms/omx/OmxDataDecoder.cpp
dom/media/platforms/omx/OmxDataDecoder.h
dom/media/platforms/omx/OmxDecoderModule.cpp
dom/media/platforms/omx/OmxPromiseLayer.cpp
--- a/dom/media/platforms/omx/GonkOmxPlatformLayer.cpp
+++ b/dom/media/platforms/omx/GonkOmxPlatformLayer.cpp
@@ -12,16 +12,17 @@
 #include "mozilla/Monitor.h"
 #include "mozilla/layers/TextureClient.h"
 #include "mozilla/layers/GrallocTextureClient.h"
 #include "mozilla/layers/ImageBridgeChild.h"
 #include <binder/MemoryDealer.h>
 #include <media/IOMX.h>
 #include <utils/List.h>
 #include <media/stagefright/OMXCodec.h>
+#include <cutils/properties.h>
 
 extern mozilla::LogModule* GetPDMLog();
 
 #ifdef LOG
 #undef LOG
 #endif
 
 #define LOG(arg, ...) MOZ_LOG(GetPDMLog(), mozilla::LogLevel::Debug, ("GonkOmxPlatformLayer:: " arg, ##__VA_ARGS__))
@@ -38,16 +39,23 @@ namespace mozilla {
 
 extern void GetPortIndex(nsTArray<uint32_t>& aPortIndex);
 
 bool IsSoftwareCodec(const char* aComponentName) {
   nsAutoCString str(aComponentName);
   return (str.Find(NS_LITERAL_CSTRING("OMX.google.")) == -1 ? false : true);
 }
 
+bool IsInEmulator()
+{
+  char propQemu[PROPERTY_VALUE_MAX];
+  property_get("ro.kernel.qemu", propQemu, "");
+  return !strncmp(propQemu, "1", 1);
+}
+
 class GonkOmxObserver : public BnOMXObserver {
 public:
   void onMessage(const omx_message& aMsg)
   {
     switch (aMsg.type) {
       case omx_message::EVENT:
       {
         sp<GonkOmxObserver> self = this;
@@ -284,17 +292,20 @@ GonkBufferData::InitGraphicBuffer(OMX_VI
 already_AddRefed<MediaData>
 GonkBufferData::GetPlatformMediaData()
 {
   if (mGonkPlatformLayer->GetTrackInfo()->GetAsAudioInfo()) {
     // This is audio decoding.
     return nullptr;
   }
 
-  MOZ_RELEASE_ASSERT(mTextureClientRecycleHandler);
+  if (!mTextureClientRecycleHandler) {
+    // There is no GraphicBuffer; fall back to normal YUV420 VideoData.
+    return nullptr;
+  }
 
   VideoInfo info;
   info.mDisplay = mGonkPlatformLayer->GetTrackInfo()->GetAsVideoInfo()->mDisplay;
   info.mImage = mGonkPlatformLayer->GetTrackInfo()->GetAsVideoInfo()->mImage;
   RefPtr<VideoData> data = VideoData::Create(info,
                                              mGonkPlatformLayer->GetImageContainer(),
                                              0,
                                              mBuffer->nTimeStamp,
@@ -515,45 +526,49 @@ GonkOmxPlatformLayer::InitOmxToStateLoad
   if (err != OK) {
       return OMX_ErrorUndefined;
   }
   mOmx = mOmxClient.interface();
   if (!mOmx.get()) {
     return OMX_ErrorUndefined;
   }
 
+  bool useHardwareCodecOnly = false;
+
+  // H264 and H263 have different profiles, and the software codec doesn't support high profile.
+  // So we use the hardware codec only.
+  if (!IsInEmulator() &&
+      (mInfo->mMimeType.EqualsLiteral("video/avc") ||
+       mInfo->mMimeType.EqualsLiteral("video/mp4") ||
+       mInfo->mMimeType.EqualsLiteral("video/mp4v-es") ||
+       mInfo->mMimeType.EqualsLiteral("video/3gp"))) {
+    useHardwareCodecOnly = true;
+  }
+
+  LOG("find componenet for mime type %s", mInfo->mMimeType.Data());
   // In Gonk, the software component name has prefix "OMX.google". It needs to
   // have a way to use hardware codec first.
   android::Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs;
-  const char* swcomponent = nullptr;
+  nsTArray<const char*> components;
   OMXCodec::findMatchingCodecs(mInfo->mMimeType.Data(),
                                0,
                                nullptr,
                                0,
                                &matchingCodecs);
   for (uint32_t i = 0; i < matchingCodecs.size(); i++) {
-    const char* componentName = matchingCodecs.itemAt(i).mName.string();
-    if (IsSoftwareCodec(componentName)) {
-      swcomponent = componentName;
-    } else {
-      // Try to use hardware codec first.
-      if (LoadComponent(componentName)) {
-        mUsingHardwareCodec = true;
-        return OMX_ErrorNone;
-      }
-      LOG("failed to load component %s", componentName);
-    }
+    components.AppendElement(matchingCodecs.itemAt(i).mName.string());
   }
 
-  // TODO: in android ICS, the software codec is allocated in mediaserver by
-  //       default, it may be necessary to allocate it in local process.
-  //
-  // fallback to sw codec
-  if (swcomponent && LoadComponent(swcomponent)) {
-    return OMX_ErrorNone;
+  for (auto name : components) {
+    if (IsSoftwareCodec(name) && useHardwareCodecOnly) {
+      continue;
+    }
+    if (LoadComponent(name)) {
+      return OMX_ErrorNone;
+    }
   }
 
   LOG("no component is loaded");
   return OMX_ErrorUndefined;
 }
 
 OMX_ERRORTYPE
 GonkOmxPlatformLayer::EmptyThisBuffer(BufferData* aData)
--- a/dom/media/platforms/omx/OmxDataDecoder.cpp
+++ b/dom/media/platforms/omx/OmxDataDecoder.cpp
@@ -18,16 +18,18 @@ extern mozilla::LogModule* GetPDMLog();
 #define LOG(arg, ...) MOZ_LOG(GetPDMLog(), mozilla::LogLevel::Debug, ("OmxDataDecoder::%s: " arg, __func__, ##__VA_ARGS__))
 
 #define CHECK_OMX_ERR(err)     \
   if (err != OMX_ErrorNone) {  \
     NotifyError(err, __func__);\
     return;                    \
   }                            \
 
+// TODO: find a better way to calculate the minimum input buffer size.
+#define MIN_VIDEO_INPUT_BUFFER_SIZE 64 * 1024
 
 namespace mozilla {
 
 static const char*
 StateTypeToStr(OMX_STATETYPE aType)
 {
   MOZ_ASSERT(aType == OMX_StateLoaded ||
              aType == OMX_StateIdle ||
@@ -55,29 +57,69 @@ StateTypeToStr(OMX_STATETYPE aType)
 }
 
 // There should be 2 ports and port number start from 0.
 void GetPortIndex(nsTArray<uint32_t>& aPortIndex) {
   aPortIndex.AppendElement(0);
   aPortIndex.AppendElement(1);
 }
 
+template<class T> void
+InitOmxParameter(T* aParam)
+{
+  PodZero(aParam);
+  aParam->nSize = sizeof(T);
+  aParam->nVersion.s.nVersionMajor = 1;
+}
+
+// A helper class to retrieve AudioData or VideoData.
+class MediaDataHelper {
+protected:
+  virtual ~MediaDataHelper() {}
+
+public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaDataHelper)
+
+  MediaDataHelper(const TrackInfo* aTrackInfo,
+                  layers::ImageContainer* aImageContainer,
+                  OmxPromiseLayer* aOmxLayer);
+
+  already_AddRefed<MediaData> GetMediaData(BufferData* aBufferData, bool& aPlatformDepenentData);
+
+protected:
+  already_AddRefed<AudioData> CreateAudioData(BufferData* aBufferData);
+
+  already_AddRefed<VideoData> CreateYUV420VideoData(BufferData* aBufferData);
+
+  const TrackInfo* mTrackInfo;
+
+  OMX_PARAM_PORTDEFINITIONTYPE mOutputPortDef;
+
+  // audio output
+  MediaQueue<AudioData> mAudioQueue;
+
+  AudioCompactor mAudioCompactor;
+
+  // video output
+  RefPtr<layers::ImageContainer> mImageContainer;
+};
+
 OmxDataDecoder::OmxDataDecoder(const TrackInfo& aTrackInfo,
                                MediaDataDecoderCallback* aCallback,
                                layers::ImageContainer* aImageContainer)
   : mMonitor("OmxDataDecoder")
   , mOmxTaskQueue(CreateMediaDecodeTaskQueue())
+  , mImageContainer(aImageContainer)
   , mWatchManager(this, mOmxTaskQueue)
   , mOmxState(OMX_STATETYPE::OMX_StateInvalid, "OmxDataDecoder::mOmxState")
   , mTrackInfo(aTrackInfo.Clone())
   , mFlushing(false)
   , mShuttingDown(false)
   , mCheckingInputExhausted(false)
   , mPortSettingsChanged(-1, "OmxDataDecoder::mPortSettingsChanged")
-  , mAudioCompactor(mAudioQueue)
   , mCallback(aCallback)
 {
   LOG("(%p)", this);
   mOmxLayer = new OmxPromiseLayer(mOmxTaskQueue, this, aImageContainer);
 
   nsCOMPtr<nsIRunnable> r =
     NS_NewRunnableMethod(this, &OmxDataDecoder::InitializationTask);
   mOmxTaskQueue->Dispatch(r.forget());
@@ -267,107 +309,35 @@ OmxDataDecoder::DoAsyncShutdown()
            })
     ->CompletionPromise()
     ->Then(mOmxTaskQueue, __func__,
            [self] () {
              LOG("DoAsyncShutdown: OMX_StateLoaded, it is safe to shutdown omx");
              self->mOmxLayer->Shutdown();
              self->mWatchManager.Shutdown();
              self->mOmxLayer = nullptr;
+             self->mMediaDataHelper = nullptr;
 
              MonitorAutoLock lock(self->mMonitor);
              self->mShuttingDown = false;
              self->mMonitor.Notify();
            },
            [self] () {
              self->mOmxLayer->Shutdown();
              self->mWatchManager.Shutdown();
              self->mOmxLayer = nullptr;
+             self->mMediaDataHelper = nullptr;
 
              MonitorAutoLock lock(self->mMonitor);
              self->mShuttingDown = false;
              self->mMonitor.Notify();
            });
 }
 
 void
-OmxDataDecoder::OutputAudio(BufferData* aBufferData)
-{
-  // TODO: it'd be better to move these code to BufferData::GetPlatformMediaData() or
-  //       some kind of abstract layer.
-  MOZ_ASSERT(mOmxTaskQueue->IsCurrentThreadIn());
-  OMX_BUFFERHEADERTYPE* buf = aBufferData->mBuffer;
-  AudioInfo* info = mTrackInfo->GetAsAudioInfo();
-  if (buf->nFilledLen) {
-    uint64_t offset = 0;
-    uint32_t frames = buf->nFilledLen / (2 * info->mChannels);
-    if (aBufferData->mRawData) {
-      offset = aBufferData->mRawData->mOffset;
-    }
-    typedef AudioCompactor::NativeCopy OmxCopy;
-    mAudioCompactor.Push(offset,
-                         buf->nTimeStamp,
-                         info->mRate,
-                         frames,
-                         info->mChannels,
-                         OmxCopy(buf->pBuffer + buf->nOffset,
-                                 buf->nFilledLen,
-                                 info->mChannels));
-    RefPtr<AudioData> audio = mAudioQueue.PopFront();
-    mCallback->Output(audio);
-  }
-  aBufferData->mStatus = BufferData::BufferStatus::FREE;
-}
-
-void
-OmxDataDecoder::OutputVideo(BufferData* aBufferData)
-{
-  MOZ_ASSERT(mOmxTaskQueue->IsCurrentThreadIn());
-
-  RefPtr<MediaData> data = aBufferData->GetPlatformMediaData();
-  MOZ_RELEASE_ASSERT(data);
-
-  VideoData* video(data->As<VideoData>());
-  if (aBufferData->mRawData) {
-    video->mTime = aBufferData->mRawData->mTime;
-    video->mTimecode = aBufferData->mRawData->mTimecode;
-    video->mOffset = aBufferData->mRawData->mOffset;
-    video->mDuration = aBufferData->mRawData->mDuration;
-    video->mKeyframe = aBufferData->mRawData->mKeyframe;
-  }
-
-  aBufferData->mStatus = BufferData::BufferStatus::OMX_CLIENT_OUTPUT;
-
-  // TextureClient's recycle callback is called when reference count of
-  // TextureClient becomes 1. In most cases, the last reference count is held
-  // by ITextureClientRecycleAllocator.
-  // And then promise will be resolved in the callback.
-  // TODO:
-  //   Because it is gonk specific behaviour, it needs to find a way to
-  //   proper abstracting it.
-  MOZ_RELEASE_ASSERT(aBufferData->mPromise.IsEmpty());
-  RefPtr<OmxBufferPromise> p = aBufferData->mPromise.Ensure(__func__);
-
-  RefPtr<OmxDataDecoder> self = this;
-  RefPtr<BufferData> buffer = aBufferData;
-  p->Then(mOmxTaskQueue, __func__,
-          [self, buffer] () {
-            MOZ_RELEASE_ASSERT(buffer->mStatus == BufferData::BufferStatus::OMX_CLIENT_OUTPUT);
-            buffer->mStatus = BufferData::BufferStatus::FREE;
-            self->FillAndEmptyBuffers();
-          },
-          [buffer] () {
-            MOZ_RELEASE_ASSERT(buffer->mStatus == BufferData::BufferStatus::OMX_CLIENT_OUTPUT);
-            buffer->mStatus = BufferData::BufferStatus::FREE;
-          });
-
-  mCallback->Output(video);
-}
-
-void
 OmxDataDecoder::FillBufferDone(BufferData* aData)
 {
   MOZ_ASSERT(!aData || aData->mStatus == BufferData::BufferStatus::OMX_CLIENT);
 
   // Don't output sample when flush or shutting down, especially for video
   // decoded frame. Because video decoded frame has a promise in BufferData
   // waiting for layer to resolve it via recycle callback on Gonk, if other
   // module doesn't send it to layer, it will cause a unresolved promise and
@@ -378,28 +348,64 @@ OmxDataDecoder::FillBufferDone(BufferDat
     return;
   }
 
   if (aData->mBuffer->nFlags & OMX_BUFFERFLAG_EOS) {
     // Reach eos, it's an empty data so it doesn't need to output.
     EndOfStream();
     aData->mStatus = BufferData::BufferStatus::FREE;
   } else {
-    if (mTrackInfo->IsAudio()) {
-      OutputAudio(aData);
-    } else if (mTrackInfo->IsVideo()) {
-      OutputVideo(aData);
-    } else {
-      MOZ_ASSERT(0);
-    }
+    Output(aData);
     FillAndEmptyBuffers();
   }
 }
 
 void
+OmxDataDecoder::Output(BufferData* aData)
+{
+  if (!mMediaDataHelper) {
+    mMediaDataHelper = new MediaDataHelper(mTrackInfo.get(), mImageContainer, mOmxLayer);
+  }
+
+  bool isPlatformData = false;
+  RefPtr<MediaData> data = mMediaDataHelper->GetMediaData(aData, isPlatformData);
+  if (!data) {
+    aData->mStatus = BufferData::BufferStatus::FREE;
+    return;
+  }
+
+  if (isPlatformData) {
+    // If the MediaData is platform dependent data, it's mostly a kind of
+    // limited resource, for example, GraphicBuffer on Gonk. So we use promise
+    // to notify when the resource is free.
+    aData->mStatus = BufferData::BufferStatus::OMX_CLIENT_OUTPUT;
+
+    MOZ_RELEASE_ASSERT(aData->mPromise.IsEmpty());
+    RefPtr<OmxBufferPromise> p = aData->mPromise.Ensure(__func__);
+
+    RefPtr<OmxDataDecoder> self = this;
+    RefPtr<BufferData> buffer = aData;
+    p->Then(mOmxTaskQueue, __func__,
+        [self, buffer] () {
+          MOZ_RELEASE_ASSERT(buffer->mStatus == BufferData::BufferStatus::OMX_CLIENT_OUTPUT);
+          buffer->mStatus = BufferData::BufferStatus::FREE;
+          self->FillAndEmptyBuffers();
+        },
+        [buffer] () {
+          MOZ_RELEASE_ASSERT(buffer->mStatus == BufferData::BufferStatus::OMX_CLIENT_OUTPUT);
+          buffer->mStatus = BufferData::BufferStatus::FREE;
+        });
+  } else {
+    aData->mStatus = BufferData::BufferStatus::FREE;
+  }
+
+  mCallback->Output(data);
+}
+
+void
 OmxDataDecoder::FillBufferFailure(OmxBufferFailureHolder aFailureHolder)
 {
   NotifyError(aFailureHolder.mError, __func__);
 }
 
 void
 OmxDataDecoder::EmptyBufferDone(BufferData* aData)
 {
@@ -510,16 +516,17 @@ OmxDataDecoder::FindAvailableBuffer(OMX_
 {
   BUFFERLIST* buffers = GetBuffers(aType);
 
   for (uint32_t i = 0; i < buffers->Length(); i++) {
     BufferData* buf = buffers->ElementAt(i);
     if (buf->mStatus == BufferData::BufferStatus::FREE) {
       return buf;
     }
+    LOG("buffer is owned by %d, type %d", buf->mStatus, aType);
   }
 
   return nullptr;
 }
 
 nsresult
 OmxDataDecoder::AllocateBuffers(OMX_DIRTYPE aType)
 {
@@ -684,16 +691,20 @@ OmxDataDecoder::ConfigVideoCodec()
     OMX_VIDEO_CODINGTYPE codetype;
     if (videoInfo->mMimeType.EqualsLiteral("video/avc")) {
       codetype = OMX_VIDEO_CodingAVC;
     }
 
     if (def.eDir == OMX_DirInput) {
       def.format.video.eCompressionFormat = codetype;
       def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+      if (def.nBufferSize < MIN_VIDEO_INPUT_BUFFER_SIZE) {
+        def.nBufferSize = videoInfo->mImage.width * videoInfo->mImage.height;
+        LOG("Change input buffer size to %d", def.nBufferSize);
+      }
     } else {
       def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
     }
 
     err = mOmxLayer->SetParameter(OMX_IndexParamPortDefinition,
                                   &def,
                                   sizeof(def));
     if (err != OMX_ErrorNone) {
@@ -767,24 +778,16 @@ OmxDataDecoder::Event(OMX_EVENTTYPE aEve
           aEvent, aData1, aData2);
       return false;
     }
   }
 
   return true;
 }
 
-template<class T> void
-OmxDataDecoder::InitOmxParameter(T* aParam)
-{
-  PodZero(aParam);
-  aParam->nSize = sizeof(T);
-  aParam->nVersion.s.nVersionMajor = 1;
-}
-
 bool
 OmxDataDecoder::BuffersCanBeReleased(OMX_DIRTYPE aType)
 {
   BUFFERLIST* buffers = GetBuffers(aType);
   uint32_t len = buffers->Length();
   for (uint32_t i = 0; i < len; i++) {
     BufferData::BufferStatus buf_status = buffers->ElementAt(i)->mStatus;
     if (buf_status == BufferData::BufferStatus::OMX_COMPONENT ||
@@ -970,9 +973,155 @@ void OmxDataDecoder::FlushFailure(OmxCom
 {
   NotifyError(OMX_ErrorUndefined, __func__);
   mFlushing = false;
 
   MonitorAutoLock lock(mMonitor);
   mMonitor.Notify();
 }
 
+MediaDataHelper::MediaDataHelper(const TrackInfo* aTrackInfo,
+                                 layers::ImageContainer* aImageContainer,
+                                 OmxPromiseLayer* aOmxLayer)
+  : mTrackInfo(aTrackInfo)
+  , mAudioCompactor(mAudioQueue)
+  , mImageContainer(aImageContainer)
+{
+  // Get latest port definition.
+  nsTArray<uint32_t> ports;
+  GetPortIndex(ports);
+  for (auto idx : ports) {
+    InitOmxParameter(&mOutputPortDef);
+    mOutputPortDef.nPortIndex = idx;
+    aOmxLayer->GetParameter(OMX_IndexParamPortDefinition, &mOutputPortDef, sizeof(mOutputPortDef));
+    if (mOutputPortDef.eDir == OMX_DirOutput) {
+      break;
+    }
+  }
 }
+
+already_AddRefed<MediaData>
+MediaDataHelper::GetMediaData(BufferData* aBufferData, bool& aPlatformDepenentData)
+{
+  aPlatformDepenentData = false;
+  RefPtr<MediaData> data;
+
+  if (mTrackInfo->IsAudio()) {
+    if (!aBufferData->mBuffer->nFilledLen) {
+      return nullptr;
+    }
+    data = CreateAudioData(aBufferData);
+  } else if (mTrackInfo->IsVideo()) {
+    data = aBufferData->GetPlatformMediaData();
+    if (data) {
+      aPlatformDepenentData = true;
+    } else {
+      if (!aBufferData->mBuffer->nFilledLen) {
+        return nullptr;
+      }
+      // Get YUV VideoData; it uses more CPU, typically when a software codec is in use.
+      data = CreateYUV420VideoData(aBufferData);
+    }
+
+    // Update video time code, duration... from the raw data.
+    VideoData* video(data->As<VideoData>());
+    if (aBufferData->mRawData) {
+      video->mTime = aBufferData->mRawData->mTime;
+      video->mTimecode = aBufferData->mRawData->mTimecode;
+      video->mOffset = aBufferData->mRawData->mOffset;
+      video->mDuration = aBufferData->mRawData->mDuration;
+      video->mKeyframe = aBufferData->mRawData->mKeyframe;
+    }
+  }
+
+  return data.forget();
+}
+
+already_AddRefed<AudioData>
+MediaDataHelper::CreateAudioData(BufferData* aBufferData)
+{
+  RefPtr<AudioData> audio;
+  OMX_BUFFERHEADERTYPE* buf = aBufferData->mBuffer;
+  const AudioInfo* info = mTrackInfo->GetAsAudioInfo();
+  if (buf->nFilledLen) {
+    uint64_t offset = 0;
+    uint32_t frames = buf->nFilledLen / (2 * info->mChannels);
+    if (aBufferData->mRawData) {
+      offset = aBufferData->mRawData->mOffset;
+    }
+    typedef AudioCompactor::NativeCopy OmxCopy;
+    mAudioCompactor.Push(offset,
+                         buf->nTimeStamp,
+                         info->mRate,
+                         frames,
+                         info->mChannels,
+                         OmxCopy(buf->pBuffer + buf->nOffset,
+                                 buf->nFilledLen,
+                                 info->mChannels));
+    audio = mAudioQueue.PopFront();
+  }
+
+  return audio.forget();
+}
+
+already_AddRefed<VideoData>
+MediaDataHelper::CreateYUV420VideoData(BufferData* aBufferData)
+{
+  uint8_t *yuv420p_buffer = (uint8_t *)aBufferData->mBuffer->pBuffer;
+  int32_t stride = mOutputPortDef.format.video.nStride;
+  int32_t slice_height = mOutputPortDef.format.video.nSliceHeight;
+  int32_t width = mTrackInfo->GetAsVideoInfo()->mImage.width;
+  int32_t height = mTrackInfo->GetAsVideoInfo()->mImage.height;
+
+  // TODO: convert other formats to YUV420.
+  if (mOutputPortDef.format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar) {
+    return nullptr;
+  }
+
+  size_t yuv420p_y_size = stride * slice_height;
+  size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
+  uint8_t *yuv420p_y = yuv420p_buffer;
+  uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
+  uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;
+
+  VideoData::YCbCrBuffer b;
+  b.mPlanes[0].mData = yuv420p_y;
+  b.mPlanes[0].mWidth = width;
+  b.mPlanes[0].mHeight = height;
+  b.mPlanes[0].mStride = stride;
+  b.mPlanes[0].mOffset = 0;
+  b.mPlanes[0].mSkip = 0;
+
+  b.mPlanes[1].mData = yuv420p_u;
+  b.mPlanes[1].mWidth = (width + 1) / 2;
+  b.mPlanes[1].mHeight = (height + 1) / 2;
+  b.mPlanes[1].mStride = (stride + 1) / 2;
+  b.mPlanes[1].mOffset = 0;
+  b.mPlanes[1].mSkip = 0;
+
+  b.mPlanes[2].mData = yuv420p_v;
+  b.mPlanes[2].mWidth =(width + 1) / 2;
+  b.mPlanes[2].mHeight = (height + 1) / 2;
+  b.mPlanes[2].mStride = (stride + 1) / 2;
+  b.mPlanes[2].mOffset = 0;
+  b.mPlanes[2].mSkip = 0;
+
+  VideoInfo info;
+  info.mDisplay = mTrackInfo->GetAsVideoInfo()->mDisplay;
+  info.mImage = mTrackInfo->GetAsVideoInfo()->mImage;
+  RefPtr<VideoData> data = VideoData::Create(info,
+                                             mImageContainer,
+                                             0, // Filled later by caller.
+                                             0, // Filled later by caller.
+                                             1, // We don't know the duration.
+                                             b,
+                                             0, // Filled later by caller.
+                                             -1,
+                                             info.mImage);
+
+  LOG("YUV420 VideoData: disp width %d, height %d, pic width %d, height %d, time %ld",
+      info.mDisplay.width, info.mDisplay.height, info.mImage.width,
+      info.mImage.height, aBufferData->mBuffer->nTimeStamp);
+
+  return data.forget();
+}
+
+}
--- a/dom/media/platforms/omx/OmxDataDecoder.h
+++ b/dom/media/platforms/omx/OmxDataDecoder.h
@@ -7,19 +7,23 @@
 #if !defined(OmxDataDecoder_h_)
 #define OmxDataDecoder_h_
 
 #include "mozilla/Monitor.h"
 #include "PlatformDecoderModule.h"
 #include "OmxPromiseLayer.h"
 #include "MediaInfo.h"
 #include "AudioCompactor.h"
+#include "OMX_Component.h"
+#include "ImageContainer.h"
 
 namespace mozilla {
 
+class MediaDataHelper;
+
 typedef OmxPromiseLayer::OmxCommandPromise OmxCommandPromise;
 typedef OmxPromiseLayer::OmxBufferPromise OmxBufferPromise;
 typedef OmxPromiseLayer::OmxBufferFailureHolder OmxBufferFailureHolder;
 typedef OmxPromiseLayer::OmxCommandFailureHolder OmxCommandFailureHolder;
 typedef OmxPromiseLayer::BufferData BufferData;
 typedef OmxPromiseLayer::BUFFERLIST BUFFERLIST;
 
 /* OmxDataDecoder is the major class which performs followings:
@@ -105,19 +109,17 @@ protected:
   void SendEosBuffer();
 
   void EndOfStream();
 
   // It could be called after codec specific data is sent and component found
   // the port format is changed due to different codec specific.
   void PortSettingsChanged();
 
-  void OutputAudio(BufferData* aBufferData);
-
-  void OutputVideo(BufferData* aBufferData);
+  void Output(BufferData* aData);
 
   // Buffer can be released if its status is not OMX_COMPONENT or
   // OMX_CLIENT_OUTPUT.
   bool BuffersCanBeReleased(OMX_DIRTYPE aType);
 
   OMX_DIRTYPE GetPortDirection(uint32_t aPortIndex);
 
   void DoAsyncShutdown();
@@ -131,29 +133,29 @@ protected:
   BUFFERLIST* GetBuffers(OMX_DIRTYPE aType);
 
   nsresult AllocateBuffers(OMX_DIRTYPE aType);
 
   nsresult ReleaseBuffers(OMX_DIRTYPE aType);
 
   BufferData* FindAvailableBuffer(OMX_DIRTYPE aType);
 
-  template<class T> void InitOmxParameter(T* aParam);
-
   // aType could be OMX_DirMax for all types.
   RefPtr<OmxPromiseLayer::OmxBufferPromise::AllPromiseType>
   CollectBufferPromises(OMX_DIRTYPE aType);
 
   Monitor mMonitor;
 
   // The Omx TaskQueue.
   RefPtr<TaskQueue> mOmxTaskQueue;
 
   RefPtr<TaskQueue> mReaderTaskQueue;
 
+  RefPtr<layers::ImageContainer> mImageContainer;
+
   WatchManager<OmxDataDecoder> mWatchManager;
 
   // It is accessed in omx TaskQueue.
   Watchable<OMX_STATETYPE> mOmxState;
 
   RefPtr<OmxPromiseLayer> mOmxLayer;
 
   UniquePtr<TrackInfo> mTrackInfo;
@@ -180,21 +182,16 @@ protected:
 
   // It is access in Omx TaskQueue.
   nsTArray<RefPtr<MediaRawData>> mMediaRawDatas;
 
   BUFFERLIST mInPortBuffers;
 
   BUFFERLIST mOutPortBuffers;
 
-  // For audio output.
-  // TODO: because this class is for both video and audio decoding, so there
-  // should be some kind of abstract things to these members.
-  MediaQueue<AudioData> mAudioQueue;
-
-  AudioCompactor mAudioCompactor;
+  RefPtr<MediaDataHelper> mMediaDataHelper;
 
   MediaDataDecoderCallback* mCallback;
 };
 
 }
 
 #endif /* OmxDataDecoder_h_ */
--- a/dom/media/platforms/omx/OmxDecoderModule.cpp
+++ b/dom/media/platforms/omx/OmxDecoderModule.cpp
@@ -39,13 +39,18 @@ PlatformDecoderModule::ConversionRequire
 OmxDecoderModule::DecoderNeedsConversion(const TrackInfo& aConfig) const
 {
   return kNeedNone;
 }
 
 bool
 OmxDecoderModule::SupportsMimeType(const nsACString& aMimeType) const
 {
+  // TODO: it would be better to query the supported MIME types from OMX
+  // instead of hard coding.
   return aMimeType.EqualsLiteral("audio/mp4a-latm") ||
+         aMimeType.EqualsLiteral("video/mp4v-es") ||
+         aMimeType.EqualsLiteral("video/mp4") ||
+         aMimeType.EqualsLiteral("video/3gp") ||
          aMimeType.EqualsLiteral("video/avc");
 }
 
 }
--- a/dom/media/platforms/omx/OmxPromiseLayer.cpp
+++ b/dom/media/platforms/omx/OmxPromiseLayer.cpp
@@ -114,17 +114,17 @@ OmxPromiseLayer::GetBufferHolders(OMX_DI
 
   return &mOutbufferHolders;
 }
 
 already_AddRefed<MediaRawData>
 OmxPromiseLayer::FindAndRemoveRawData(OMX_TICKS aTimecode)
 {
   for (auto raw : mRawDatas) {
-    if (raw->mTimecode == aTimecode) {
+    if (raw->mTime == aTimecode) {
       mRawDatas.RemoveElement(raw);
       return raw.forget();
     }
   }
   return nullptr;
 }
 
 already_AddRefed<BufferData>