Bug 784329 - Part 4: Rename color conversion functions to use the standard OMX_COLOR enum names. r=doublec
author Chris Peterson <cpeterson@mozilla.com>
Fri, 14 Sep 2012 16:55:17 -0700
changeset 107757 4be1a01f58f3fb5c0438b249418c912a772a9f7d
parent 107756 2d94dbeb794cd8e29dfc4b6958125c2b3eb5f177
child 107758 092829268463c681b7a5a7d84425ee27fc035fd4
push id 82
push user shu@rfrn.org
push date Fri, 05 Oct 2012 13:20:22 +0000
reviewers doublec
bugs 784329
milestone 18.0a1
media/omx-plugin/OmxPlugin.cpp
--- a/media/omx-plugin/OmxPlugin.cpp
+++ b/media/omx-plugin/OmxPlugin.cpp
@@ -17,16 +17,18 @@
 
 #include "android/log.h"
 
 #undef LOG
 #define LOG(args...)  __android_log_print(ANDROID_LOG_INFO, "OmxPlugin" , ## args)
 
 using namespace MPAPI;
 
+const int OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
+
 namespace android {
 
 // MediaStreamSource is a DataSource that reads from a MPAPI media stream.
 
 class MediaStreamSource : public DataSource {
   PluginHost *mPluginHost;
 public:
   MediaStreamSource(PluginHost *aPluginHost, Decoder *aDecoder);
@@ -127,21 +129,21 @@ class OmxDecoder {
   AudioFrame mAudioFrame;
 
   // 'true' if a read from the audio stream was done while reading the metadata
   bool mAudioMetadataRead;
 
   void ReleaseVideoBuffer();
   void ReleaseAudioBuffer();
 
-  void PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
-  void CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
-  void SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
-  void SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
-  void SemiPlanarYVU420Packed32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  void ToVideoFrame_YUV420Planar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  void ToVideoFrame_CbYCrY(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  void ToVideoFrame_YUV420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  void ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
+  void ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
   bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
   bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
                     int32_t aAudioChannels, int32_t aAudioSampleRate);
 public:
   OmxDecoder(PluginHost *aPluginHost, Decoder *aDecoder);
   ~OmxDecoder();
 
   bool Init();
@@ -461,84 +463,79 @@ void OmxDecoder::ReleaseVideoBuffer() {
 
 void OmxDecoder::ReleaseAudioBuffer() {
   if (mAudioBuffer) {
     mAudioBuffer->release();
     mAudioBuffer = NULL;
   }
 }
 
-void OmxDecoder::PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+void OmxDecoder::ToVideoFrame_YUV420Planar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
   void *y = aData;
   void *u = static_cast<uint8_t *>(y) + mVideoStride * mVideoSliceHeight;
   void *v = static_cast<uint8_t *>(u) + mVideoStride/2 * mVideoSliceHeight/2;
-
   aFrame->Set(aTimeUs, aKeyFrame,
               aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
               y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
               u, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0,
               v, mVideoStride/2, mVideoWidth/2, mVideoHeight/2, 0, 0);
 }
 
-void OmxDecoder::CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+void OmxDecoder::ToVideoFrame_CbYCrY(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
   aFrame->Set(aTimeUs, aKeyFrame,
               aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
               aData, mVideoStride, mVideoWidth, mVideoHeight, 1, 1,
               aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 3,
               aData, mVideoStride, mVideoWidth/2, mVideoHeight/2, 2, 3);
 }
 
-void OmxDecoder::SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+void OmxDecoder::ToVideoFrame_YUV420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
   void *y = aData;
   void *uv = static_cast<uint8_t *>(y) + (mVideoStride * mVideoSliceHeight);
-
   aFrame->Set(aTimeUs, aKeyFrame,
               aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
               y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
               uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1,
               uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1);
 }
 
-void OmxDecoder::SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
-  SemiPlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+void OmxDecoder::ToVideoFrame_YVU420SemiPlanar(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+  ToVideoFrame_YUV420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
   aFrame->Cb.mOffset = 1;
   aFrame->Cr.mOffset = 0;
 }
 
-void OmxDecoder::SemiPlanarYVU420Packed32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
+void OmxDecoder::ToVideoFrame_YVU420PackedSemiPlanar32m4ka(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
   size_t roundedSliceHeight = (mVideoSliceHeight + 31) & ~31;
   size_t roundedStride = (mVideoStride + 31) & ~31;
   void *y = aData;
   void *uv = static_cast<uint8_t *>(y) + (roundedStride * roundedSliceHeight);
   aFrame->Set(aTimeUs, aKeyFrame,
               aData, aSize, mVideoStride, mVideoSliceHeight, mVideoRotation,
               y, mVideoStride, mVideoWidth, mVideoHeight, 0, 0,
               uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 1, 1,
               uv, mVideoStride, mVideoWidth/2, mVideoHeight/2, 0, 1);
 }
 
 bool OmxDecoder::ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame) {
-  const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
-  const int OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
-
   switch (mVideoColorFormat) {
-  case OMX_COLOR_FormatYUV420Planar:
-    PlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+  case OMX_COLOR_FormatYUV420Planar: // e.g. Asus Transformer, Stagefright's software decoder
+    ToVideoFrame_YUV420Planar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+    break;
+  case OMX_COLOR_FormatCbYCrY: // e.g. Droid 1
+    ToVideoFrame_CbYCrY(aFrame, aTimeUs, aData, aSize, aKeyFrame);
     break;
-  case OMX_COLOR_FormatCbYCrY:
-    CbYCrYFrame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
-    break;
-  case OMX_COLOR_FormatYUV420SemiPlanar:
-    SemiPlanarYUV420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+  case OMX_COLOR_FormatYUV420SemiPlanar: // e.g. Galaxy S III
+    ToVideoFrame_YUV420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
     break;
-  case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
-    SemiPlanarYVU420Frame(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+  case OMX_QCOM_COLOR_FormatYVU420SemiPlanar: // e.g. Nexus One
+    ToVideoFrame_YVU420SemiPlanar(aFrame, aTimeUs, aData, aSize, aKeyFrame);
     break;
-  case OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka:
-    SemiPlanarYVU420Packed32m4ka(aFrame, aTimeUs, aData, aSize, aKeyFrame);
+  case OMX_QCOM_COLOR_FormatYVU420PackedSemiPlanar32m4ka: // e.g. Otoro
+    ToVideoFrame_YVU420PackedSemiPlanar32m4ka(aFrame, aTimeUs, aData, aSize, aKeyFrame);
     break;
   default:
     LOG("Unknown video color format: %#x", mVideoColorFormat);
     return false;
   }
   return true;
 }
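
The plane offsets computed by the converters above follow directly from the 4:2:0 buffer layouts: in the planar case the quarter-size Cb and Cr planes follow the full Y plane, in the semi-planar case a single interleaved CbCr plane follows it, and the QCOM 32m4ka variant first rounds the stride and slice height up to multiples of 32 before locating the chroma plane. A minimal standalone sketch of that arithmetic, using a hypothetical 320x240 frame (illustration only, not code from this patch):

// --- illustrative sketch, not part of the patch ---
// Plane-offset arithmetic behind the YUV420 converters above, assuming a
// hypothetical 320x240 frame with stride == width and sliceHeight == height.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

int main() {
  const size_t stride = 320, sliceHeight = 240;
  std::vector<uint8_t> buffer(stride * sliceHeight * 3 / 2); // 4:2:0 is 1.5 bytes/pixel

  // OMX_COLOR_FormatYUV420Planar: full Y plane, then quarter-size Cb and Cr planes.
  uint8_t *y = buffer.data();
  uint8_t *u = y + stride * sliceHeight;              // Cb starts after the Y plane
  uint8_t *v = u + (stride / 2) * (sliceHeight / 2);  // Cr starts after the Cb plane
  assert(v + (stride / 2) * (sliceHeight / 2) == buffer.data() + buffer.size());

  // OMX_COLOR_FormatYUV420SemiPlanar: full Y plane, then one interleaved CbCr plane.
  uint8_t *uv = y + stride * sliceHeight;
  assert(uv + stride * (sliceHeight / 2) == buffer.data() + buffer.size());

  // The QCOM ...PackedSemiPlanar32m4ka variant finds the CbCr plane using stride
  // and slice height rounded up to multiples of 32; (x + 31) & ~31 is the usual
  // round-up-to-32 trick.
  const size_t roundedStride = (stride + 31) & ~size_t(31);
  const size_t roundedSliceHeight = (sliceHeight + 31) & ~size_t(31);
  assert(roundedStride == 320 && roundedSliceHeight == 256);
  return 0;
}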
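
The offset/skip pairs passed to VideoFrame::Set can be read as "byte index of the first sample" and "bytes to jump over after each sample". Under that reading, the CbYCrY values above (Y: offset 1, skip 1; Cb: offset 0, skip 3; Cr: offset 2, skip 3) pick the right bytes out of a UYVY-interleaved buffer, and ToVideoFrame_YVU420SemiPlanar only needs to swap the chroma offsets because NV21 stores Cr before Cb in its interleaved plane. The sketch below demonstrates that reading with a hypothetical helper and a two-macropixel test buffer; it is an illustration of the layout, not MPAPI code:

// --- illustrative sketch, not part of the patch ---
// How an offset/skip pair can walk the interleaved CbYCrY (UYVY) layout used by
// ToVideoFrame_CbYCrY above: offset is the byte index of the first sample, skip
// is the number of bytes to jump over after reading each sample.
#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical helper, not part of MPAPI: gather every sample of one component.
static std::vector<uint8_t> ReadComponent(const uint8_t *aData, size_t aSize,
                                          size_t aOffset, size_t aSkip) {
  std::vector<uint8_t> samples;
  for (size_t i = aOffset; i < aSize; i += 1 + aSkip) {
    samples.push_back(aData[i]);
  }
  return samples;
}

int main() {
  // Two macropixels of UYVY: U0 Y0 V0 Y1  U1 Y2 V1 Y3
  const uint8_t uyvy[] = { 'U', 'A', 'V', 'B', 'u', 'C', 'v', 'D' };

  std::vector<uint8_t> y  = ReadComponent(uyvy, sizeof(uyvy), 1, 1); // A B C D
  std::vector<uint8_t> cb = ReadComponent(uyvy, sizeof(uyvy), 0, 3); // U u
  std::vector<uint8_t> cr = ReadComponent(uyvy, sizeof(uyvy), 2, 3); // V v

  std::printf("Y=%c%c%c%c Cb=%c%c Cr=%c%c\n",
              y[0], y[1], y[2], y[3], cb[0], cb[1], cr[0], cr[1]);
  return 0;
}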