Bug 1033903 - Support GraphicBuffer in MediaCodecReader. r=cpearce, r=sotaro
authorBruce Sun <brsun@mozilla.com>
Thu, 13 Nov 2014 11:26:13 +0800
changeset 215534 87e94129e7b37f7afd0daa8d22a2bf1298898391
parent 215533 0c9407b0e481e1718458d116050b0817aeacb905
child 215535 2f9619594cf0fe1ab76ebd92de20791226bf8f75
push id: 51785
push user: ryanvm@gmail.com
push date: Thu, 13 Nov 2014 17:03:45 +0000
treeherder: mozilla-inbound@b8613576f657 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: cpearce, sotaro
bugs: 1033903
milestone: 36.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1033903 - Support GraphicBuffer in MediaCodecReader. r=cpearce, r=sotaro
dom/media/omx/MediaCodecReader.cpp
dom/media/omx/MediaCodecReader.h
--- a/dom/media/omx/MediaCodecReader.cpp
+++ b/dom/media/omx/MediaCodecReader.cpp
@@ -6,29 +6,32 @@
 
 #include "MediaCodecReader.h"
 
 #include <OMX_IVCommon.h>
 
 #include <gui/Surface.h>
 #include <ICrypto.h>
 
+#include "GonkNativeWindow.h"
+
 #include <stagefright/foundation/ABuffer.h>
 #include <stagefright/foundation/ADebug.h>
 #include <stagefright/foundation/ALooper.h>
 #include <stagefright/foundation/AMessage.h>
 #include <stagefright/MediaBuffer.h>
 #include <stagefright/MediaCodec.h>
 #include <stagefright/MediaDefs.h>
 #include <stagefright/MediaExtractor.h>
 #include <stagefright/MediaSource.h>
 #include <stagefright/MetaData.h>
 #include <stagefright/Utils.h>
 
 #include "mozilla/TimeStamp.h"
+#include "mozilla/layers/GrallocTextureClient.h"
 
 #include "gfx2DGlue.h"
 
 #include "MediaStreamSource.h"
 #include "MediaTaskQueue.h"
 #include "MP3FrameParser.h"
 #include "nsThreadUtils.h"
 #include "ImageContainer.h"
@@ -107,16 +110,20 @@ MediaCodecReader::VideoResourceListener:
 void
 MediaCodecReader::VideoResourceListener::codecCanceled()
 {
   if (mReader) {
     mReader->codecCanceled(mReader->mVideoTrack);
   }
 }
 
+MediaCodecReader::TrackInputCopier::~TrackInputCopier()
+{
+}
+
 bool
 MediaCodecReader::TrackInputCopier::Copy(MediaBuffer* aSourceBuffer,
                                          sp<ABuffer> aCodecBuffer)
 {
   if (aSourceBuffer == nullptr ||
       aCodecBuffer == nullptr ||
       aSourceBuffer->range_length() > aCodecBuffer->capacity()) {
     return false;
@@ -125,28 +132,30 @@ MediaCodecReader::TrackInputCopier::Copy
   aCodecBuffer->setRange(0, aSourceBuffer->range_length());
   memcpy(aCodecBuffer->data(),
          (uint8_t*)aSourceBuffer->data() + aSourceBuffer->range_offset(),
          aSourceBuffer->range_length());
 
   return true;
 }
 
-MediaCodecReader::Track::Track()
-  : mSourceIsStopped(true)
+MediaCodecReader::Track::Track(Type type)
+  : mType(type)
+  , mSourceIsStopped(true)
   , mDurationLock("MediaCodecReader::Track::mDurationLock")
   , mDurationUs(INT64_C(0))
   , mInputIndex(sInvalidInputIndex)
   , mInputEndOfStream(false)
   , mOutputEndOfStream(false)
   , mSeekTimeUs(sInvalidTimestampUs)
   , mFlushed(false)
   , mDiscontinuity(false)
   , mTaskQueue(nullptr)
 {
+  MOZ_ASSERT(mType != kUnknown, "Should have a valid Track::Type");
 }
 
 // Append the value of |kKeyValidSamples| to the end of each vorbis buffer.
 // https://github.com/mozilla-b2g/platform_frameworks_av/blob/master/media/libstagefright/OMXCodec.cpp#L3128
 // https://github.com/mozilla-b2g/platform_frameworks_av/blob/master/media/libstagefright/NuMediaExtractor.cpp#L472
 bool
 MediaCodecReader::VorbisInputCopier::Copy(MediaBuffer* aSourceBuffer,
                                           sp<ABuffer> aCodecBuffer)
@@ -168,21 +177,23 @@ MediaCodecReader::VorbisInputCopier::Cop
          aSourceBuffer->range_length());
   memcpy(aCodecBuffer->data() + aSourceBuffer->range_length(),
          &numPageSamples, sizeof(numPageSamples));
 
   return true;
 }
 
 MediaCodecReader::AudioTrack::AudioTrack()
+  : Track(kAudio)
 {
 }
 
 MediaCodecReader::VideoTrack::VideoTrack()
-  : mWidth(0)
+  : Track(kVideo)
+  , mWidth(0)
   , mHeight(0)
   , mStride(0)
   , mSliceHeight(0)
   , mColorFormat(0)
   , mRotation(0)
 {
 }
 
@@ -274,23 +285,24 @@ MediaCodecReader::ProcessCachedDataTask:
   nsRefPtr<ReferenceKeeperRunnable<MediaCodecReader>> runnable(
       new ReferenceKeeperRunnable<MediaCodecReader>(mReader));
   mReader = nullptr;
   NS_DispatchToMainThread(runnable.get());
 }
 
 MediaCodecReader::MediaCodecReader(AbstractMediaDecoder* aDecoder)
   : MediaOmxCommonReader(aDecoder)
+  , mExtractor(nullptr)
+  , mIsWaitingResources(false)
+  , mTextureClientIndexesLock("MediaCodecReader::mTextureClientIndexesLock")
   , mColorConverterBufferSize(0)
-  , mExtractor(nullptr)
   , mParserMonitor("MediaCodecReader::mParserMonitor")
   , mParseDataFromCache(true)
   , mNextParserPosition(INT64_C(0))
   , mParsedDataLength(INT64_C(0))
-  , mIsWaitingResources(false)
 {
   mHandler = new MessageHandler(this);
   mVideoListener = new VideoResourceListener(this);
 }
 
 MediaCodecReader::~MediaCodecReader()
 {
   MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
@@ -321,21 +333,21 @@ MediaCodecReader::IsDormantNeeded()
   return mVideoTrack.mSource != nullptr;
 }
 
 void
 MediaCodecReader::ReleaseMediaResources()
 {
   // Stop the mSource because we are in the dormant state and the stop function
   // will rewind the mSource to the beginning of the stream.
-  if (mVideoTrack.mSource != nullptr) {
+  if (mVideoTrack.mSource != nullptr && !mVideoTrack.mSourceIsStopped) {
     mVideoTrack.mSource->stop();
     mVideoTrack.mSourceIsStopped = true;
   }
-  if (mAudioTrack.mSource != nullptr) {
+  if (mAudioTrack.mSource != nullptr && !mAudioTrack.mSourceIsStopped) {
     mAudioTrack.mSource->stop();
     mAudioTrack.mSourceIsStopped = true;
   }
   ReleaseCriticalResources();
 }
 
 void
 MediaCodecReader::Shutdown()
@@ -747,16 +759,92 @@ MediaCodecReader::ResetDecode()
     mVideoTrack.mTaskQueue->Flush();
     FlushCodecData(mVideoTrack);
     mVideoTrack.mDiscontinuity = true;
   }
 
   return MediaDecoderReader::ResetDecode();
 }
 
+void
+MediaCodecReader::TextureClientRecycleCallback(TextureClient* aClient,
+                                               void* aClosure)
+{
+  nsRefPtr<MediaCodecReader> reader = static_cast<MediaCodecReader*>(aClosure);
+  MOZ_ASSERT(reader, "reader should not be nullptr in TextureClientRecycleCallback()");
+
+  reader->TextureClientRecycleCallback(aClient);
+}
+
+void
+MediaCodecReader::TextureClientRecycleCallback(TextureClient* aClient)
+{
+  MOZ_ASSERT(aClient, "aClient should not be nullptr in RecycleCallback()");
+
+  size_t index = 0;
+
+  {
+    MutexAutoLock al(mTextureClientIndexesLock);
+
+    aClient->ClearRecycleCallback();
+
+    // aClient has been removed from mTextureClientIndexes by
+    // ReleaseAllTextureClients() on another thread.
+    if (!mTextureClientIndexes.Get(aClient, &index)) {
+      return;
+    }
+    mTextureClientIndexes.Remove(aClient);
+  }
+
+  if (mVideoTrack.mCodec != nullptr) {
+    mVideoTrack.mCodec->releaseOutputBuffer(index);
+  }
+}
+
+PLDHashOperator
+MediaCodecReader::ReleaseTextureClient(TextureClient* aClient,
+                                       size_t& aIndex,
+                                       void* aUserArg)
+{
+  nsRefPtr<MediaCodecReader> reader = static_cast<MediaCodecReader*>(aUserArg);
+  MOZ_ASSERT(reader, "reader should not be nullptr in ReleaseTextureClient()");
+
+  return reader->ReleaseTextureClient(aClient, aIndex);
+}
+
+PLDHashOperator
+MediaCodecReader::ReleaseTextureClient(TextureClient* aClient,
+                                       size_t& aIndex)
+{
+  MOZ_ASSERT(aClient, "TextureClient should be a valid pointer");
+
+  aClient->ClearRecycleCallback();
+
+  if (mVideoTrack.mCodec != nullptr) {
+    mVideoTrack.mCodec->releaseOutputBuffer(aIndex);
+  }
+
+  return PL_DHASH_REMOVE;
+}
+
+void
+MediaCodecReader::ReleaseAllTextureClients()
+{
+  MutexAutoLock al(mTextureClientIndexesLock);
+  MOZ_ASSERT(mTextureClientIndexes.Count() == 0, "All TextureClients should be released already"); // invariant: map empty by now
+
+  if (mTextureClientIndexes.Count() == 0) {
+    return;
+  }
+  printf_stderr("All TextureClients should be released already");
+
+  mTextureClientIndexes.Enumerate(MediaCodecReader::ReleaseTextureClient, this);
+  mTextureClientIndexes.Clear();
+}
+
 bool
 MediaCodecReader::DecodeVideoFrameSync(int64_t aTimeThreshold)
 {
   if (mVideoTrack.mCodec == nullptr || !mVideoTrack.mCodec->allocated() ||
       mVideoTrack.mOutputEndOfStream) {
     return false;
   }
 
@@ -790,99 +878,123 @@ MediaCodecReader::DecodeVideoFrameSync(i
         return false;
       }
     } else {
       return false;
     }
   }
 
   bool result = false;
-  if (bufferInfo.mBuffer != nullptr && bufferInfo.mSize > 0 &&
-      bufferInfo.mBuffer->data() != nullptr) {
-    uint8_t* yuv420p_buffer = bufferInfo.mBuffer->data();
-    int32_t stride = mVideoTrack.mStride;
-    int32_t slice_height = mVideoTrack.mSliceHeight;
-
-    // Converts to OMX_COLOR_FormatYUV420Planar
-    if (mVideoTrack.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
-      ARect crop;
-      crop.top = 0;
-      crop.bottom = mVideoTrack.mHeight;
-      crop.left = 0;
-      crop.right = mVideoTrack.mWidth;
-
-      yuv420p_buffer = GetColorConverterBuffer(mVideoTrack.mWidth,
-                                               mVideoTrack.mHeight);
-      if (mColorConverter.convertDecoderOutputToI420(
-            bufferInfo.mBuffer->data(), mVideoTrack.mWidth, mVideoTrack.mHeight,
-            crop, yuv420p_buffer) != OK) {
-        mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
-        NS_WARNING("Unable to convert color format");
-        return false;
-      }
-
-      stride = mVideoTrack.mWidth;
-      slice_height = mVideoTrack.mHeight;
-    }
-
-    size_t yuv420p_y_size = stride * slice_height;
-    size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
-    uint8_t* yuv420p_y = yuv420p_buffer;
-    uint8_t* yuv420p_u = yuv420p_y + yuv420p_y_size;
-    uint8_t* yuv420p_v = yuv420p_u + yuv420p_u_size;
-
+  VideoData *v = nullptr;
+  RefPtr<TextureClient> textureClient;
+  sp<GraphicBuffer> graphicBuffer;
+  if (bufferInfo.mBuffer != nullptr) {
     // This is the approximate byte position in the stream.
     int64_t pos = mDecoder->GetResource()->Tell();
 
-    VideoData::YCbCrBuffer b;
-    b.mPlanes[0].mData = yuv420p_y;
-    b.mPlanes[0].mWidth = mVideoTrack.mWidth;
-    b.mPlanes[0].mHeight = mVideoTrack.mHeight;
-    b.mPlanes[0].mStride = stride;
-    b.mPlanes[0].mOffset = 0;
-    b.mPlanes[0].mSkip = 0;
+    if (mVideoTrack.mNativeWindow != nullptr &&
+        mVideoTrack.mCodec->getOutputGraphicBufferFromIndex(bufferInfo.mIndex, &graphicBuffer) == OK &&
+        graphicBuffer != nullptr) {
+      textureClient = mVideoTrack.mNativeWindow->getTextureClientFromBuffer(graphicBuffer.get());
+      v = VideoData::Create(mInfo.mVideo,
+                            mDecoder->GetImageContainer(),
+                            pos,
+                            bufferInfo.mTimeUs,
+                            1, // We don't know the duration.
+                            textureClient,
+                            bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_SYNCFRAME,
+                            -1,
+                            mVideoTrack.mRelativePictureRect);
+    } else if (bufferInfo.mSize > 0 &&
+        bufferInfo.mBuffer->data() != nullptr) {
+      uint8_t* yuv420p_buffer = bufferInfo.mBuffer->data();
+      int32_t stride = mVideoTrack.mStride;
+      int32_t slice_height = mVideoTrack.mSliceHeight;
 
-    b.mPlanes[1].mData = yuv420p_u;
-    b.mPlanes[1].mWidth = (mVideoTrack.mWidth + 1) / 2;
-    b.mPlanes[1].mHeight = (mVideoTrack.mHeight + 1) / 2;
-    b.mPlanes[1].mStride = (stride + 1) / 2;
-    b.mPlanes[1].mOffset = 0;
-    b.mPlanes[1].mSkip = 0;
+      // Converts to OMX_COLOR_FormatYUV420Planar
+      if (mVideoTrack.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
+        ARect crop;
+        crop.top = 0;
+        crop.bottom = mVideoTrack.mHeight;
+        crop.left = 0;
+        crop.right = mVideoTrack.mWidth;
+
+        yuv420p_buffer = GetColorConverterBuffer(mVideoTrack.mWidth,
+                                                 mVideoTrack.mHeight);
+        if (mColorConverter.convertDecoderOutputToI420(
+              bufferInfo.mBuffer->data(), mVideoTrack.mWidth, mVideoTrack.mHeight,
+              crop, yuv420p_buffer) != OK) {
+          mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
+          NS_WARNING("Unable to convert color format");
+          return false;
+        }
 
-    b.mPlanes[2].mData = yuv420p_v;
-    b.mPlanes[2].mWidth =(mVideoTrack.mWidth + 1) / 2;
-    b.mPlanes[2].mHeight = (mVideoTrack.mHeight + 1) / 2;
-    b.mPlanes[2].mStride = (stride + 1) / 2;
-    b.mPlanes[2].mOffset = 0;
-    b.mPlanes[2].mSkip = 0;
+        stride = mVideoTrack.mWidth;
+        slice_height = mVideoTrack.mHeight;
+      }
+
+      size_t yuv420p_y_size = stride * slice_height;
+      size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
+      uint8_t* yuv420p_y = yuv420p_buffer;
+      uint8_t* yuv420p_u = yuv420p_y + yuv420p_y_size;
+      uint8_t* yuv420p_v = yuv420p_u + yuv420p_u_size;
+
+      VideoData::YCbCrBuffer b;
+      b.mPlanes[0].mData = yuv420p_y;
+      b.mPlanes[0].mWidth = mVideoTrack.mWidth;
+      b.mPlanes[0].mHeight = mVideoTrack.mHeight;
+      b.mPlanes[0].mStride = stride;
+      b.mPlanes[0].mOffset = 0;
+      b.mPlanes[0].mSkip = 0;
 
-    VideoData *v = VideoData::Create(
-      mInfo.mVideo,
-      mDecoder->GetImageContainer(),
-      pos,
-      bufferInfo.mTimeUs,
-      1, // We don't know the duration.
-      b,
-      bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_SYNCFRAME,
-      -1,
-      mVideoTrack.mRelativePictureRect);
+      b.mPlanes[1].mData = yuv420p_u;
+      b.mPlanes[1].mWidth = (mVideoTrack.mWidth + 1) / 2;
+      b.mPlanes[1].mHeight = (mVideoTrack.mHeight + 1) / 2;
+      b.mPlanes[1].mStride = (stride + 1) / 2;
+      b.mPlanes[1].mOffset = 0;
+      b.mPlanes[1].mSkip = 0;
+
+      b.mPlanes[2].mData = yuv420p_v;
+      b.mPlanes[2].mWidth =(mVideoTrack.mWidth + 1) / 2;
+      b.mPlanes[2].mHeight = (mVideoTrack.mHeight + 1) / 2;
+      b.mPlanes[2].mStride = (stride + 1) / 2;
+      b.mPlanes[2].mOffset = 0;
+      b.mPlanes[2].mSkip = 0;
+
+      v = VideoData::Create(mInfo.mVideo,
+                            mDecoder->GetImageContainer(),
+                            pos,
+                            bufferInfo.mTimeUs,
+                            1, // We don't know the duration.
+                            b,
+                            bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_SYNCFRAME,
+                            -1,
+                            mVideoTrack.mRelativePictureRect);
+    }
 
     if (v) {
       result = true;
       VideoQueue().Push(v);
     } else {
       NS_WARNING("Unable to create VideoData");
     }
   }
 
   if ((bufferInfo.mFlags & MediaCodec::BUFFER_FLAG_EOS) ||
       (status == ERROR_END_OF_STREAM)) {
     VideoQueue().Finish();
   }
-  mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
+
+  if (v != nullptr && textureClient != nullptr && graphicBuffer != nullptr && result) {
+    MutexAutoLock al(mTextureClientIndexesLock);
+    mTextureClientIndexes.Put(textureClient.get(), bufferInfo.mIndex);
+    textureClient->SetRecycleCallback(MediaCodecReader::TextureClientRecycleCallback, this);
+  } else {
+    mVideoTrack.mCodec->releaseOutputBuffer(bufferInfo.mIndex);
+  }
 
   return result;
 }
 
 void
 MediaCodecReader::Seek(int64_t aTime,
                        int64_t aStartTime,
                        int64_t aEndTime,
@@ -976,16 +1088,17 @@ MediaCodecReader::ReleaseCriticalResourc
 {
   ResetDecode();
   // Before freeing a video codec, all video buffers needed to be released
   // even from graphics pipeline.
   VideoFrameContainer* videoframe = mDecoder->GetVideoFrameContainer();
   if (videoframe) {
     videoframe->ClearCurrentFrame();
   }
+  ReleaseAllTextureClients();
 
   DestroyMediaCodecs();
 
   ClearColorConverterBuffer();
 }
 
 void
 MediaCodecReader::ReleaseResources()
@@ -1000,27 +1113,29 @@ MediaCodecReader::ReleaseResources()
 bool
 MediaCodecReader::CreateLooper()
 {
   if (mLooper != nullptr) {
     return true;
   }
 
   // Create ALooper
-  mLooper = new ALooper;
-  mLooper->setName("MediaCodecReader");
+  sp<ALooper> looper = new ALooper;
+  looper->setName("MediaCodecReader::mLooper");
 
   // Register AMessage handler to ALooper.
-  mLooper->registerHandler(mHandler);
+  looper->registerHandler(mHandler);
 
   // Start ALooper thread.
-  if (mLooper->start() != OK) {
+  if (looper->start() != OK) {
     return false;
   }
 
+  mLooper = looper;
+
   return true;
 }
 
 void
 MediaCodecReader::DestroyLooper()
 {
   if (mLooper == nullptr) {
     return;
@@ -1197,79 +1312,92 @@ MediaCodecReader::CreateMediaCodec(sp<AL
     }
 
     if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
       aTrack.mInputCopier = new VorbisInputCopier;
     } else {
       aTrack.mInputCopier = new TrackInputCopier;
     }
 
+    uint32_t capability = MediaCodecProxy::kEmptyCapability;
+    if (aTrack.mType == Track::kVideo &&
+        aTrack.mCodec->getCapability(&capability) == OK &&
+        (capability & MediaCodecProxy::kCanExposeGraphicBuffer) == MediaCodecProxy::kCanExposeGraphicBuffer) {
+      aTrack.mNativeWindow = new GonkNativeWindow();
+    }
+
     if (!aAsync) {
       // Pending configure() and start() to codecReserved() if the creation
       // should be asynchronous.
       if (!aTrack.mCodec->allocated() || !ConfigureMediaCodec(aTrack)){
         NS_WARNING("Couldn't create and configure MediaCodec synchronously");
-        aTrack.mCodec = nullptr;
+        DestroyMediaCodec(aTrack);
         return false;
       }
     }
   }
 
   return true;
 }
 
 bool
 MediaCodecReader::ConfigureMediaCodec(Track& aTrack)
 {
   if (aTrack.mSource != nullptr && aTrack.mCodec != nullptr) {
     if (!aTrack.mCodec->allocated()) {
       return false;
     }
 
+    sp<Surface> surface;
+    if (aTrack.mNativeWindow != nullptr) {
+      surface = new Surface(aTrack.mNativeWindow->getBufferQueue());
+    }
+
     sp<MetaData> sourceFormat = aTrack.mSource->getFormat();
     sp<AMessage> codecFormat;
     convertMetaDataToMessage(sourceFormat, &codecFormat);
 
     bool allpass = true;
-    if (allpass && aTrack.mCodec->configure(codecFormat, nullptr, nullptr, 0) != OK) {
+    if (allpass && aTrack.mCodec->configure(codecFormat, surface, nullptr, 0) != OK) {
       NS_WARNING("Couldn't configure MediaCodec");
       allpass = false;
     }
     if (allpass && aTrack.mCodec->start() != OK) {
       NS_WARNING("Couldn't start MediaCodec");
       allpass = false;
     }
     if (allpass && aTrack.mCodec->getInputBuffers(&aTrack.mInputBuffers) != OK) {
       NS_WARNING("Couldn't get input buffers from MediaCodec");
       allpass = false;
     }
     if (allpass && aTrack.mCodec->getOutputBuffers(&aTrack.mOutputBuffers) != OK) {
       NS_WARNING("Couldn't get output buffers from MediaCodec");
       allpass = false;
     }
     if (!allpass) {
-      aTrack.mCodec = nullptr;
+      DestroyMediaCodec(aTrack);
       return false;
     }
   }
 
   return true;
 }
 
 void
 MediaCodecReader::DestroyMediaCodecs()
 {
-  DestroyMediaCodecs(mAudioTrack);
-  DestroyMediaCodecs(mVideoTrack);
+  DestroyMediaCodec(mAudioTrack);
+  DestroyMediaCodec(mVideoTrack);
 }
 
 void
-MediaCodecReader::DestroyMediaCodecs(Track& aTrack)
+MediaCodecReader::DestroyMediaCodec(Track& aTrack)
 {
   aTrack.mCodec = nullptr;
+  aTrack.mNativeWindow = nullptr;
 }
 
 bool
 MediaCodecReader::TriggerIncrementalParser()
 {
   if (mMetaData == nullptr) {
     return false;
   }
@@ -1507,16 +1635,23 @@ MediaCodecReader::UpdateVideoInfo()
   mVideoTrack.mRelativePictureRect = relative_picture_rect;
 
   return true;
 }
 
 status_t
 MediaCodecReader::FlushCodecData(Track& aTrack)
 {
+  if (aTrack.mType == Track::kVideo) {
+    // TODO: if we do release TextureClient on a separate thread in the future,
+    // we will have to explicitly cleanup TextureClients which have been
+    // recycled through TextureClient::mRecycleCallback.
+    // Just NO-OP for now.
+  }
+
   if (aTrack.mSource == nullptr || aTrack.mCodec == nullptr ||
       !aTrack.mCodec->allocated()) {
     return UNKNOWN_ERROR;
   }
 
   status_t status = aTrack.mCodec->flush();
   aTrack.mFlushed = (status == OK);
   if (aTrack.mFlushed) {
@@ -1655,17 +1790,16 @@ MediaCodecReader::GetCodecOutputData(Tra
         break;
       } else {
         aTrack.mCodec->releaseOutputBuffer(info.mIndex);
       }
     } else if (status == INFO_OUTPUT_BUFFERS_CHANGED) {
       // Update output buffers of MediaCodec.
       if (aTrack.mCodec->getOutputBuffers(&aTrack.mOutputBuffers) != OK) {
         NS_WARNING("Couldn't get output buffers from MediaCodec");
-        aTrack.mCodec = nullptr;
         return UNKNOWN_ERROR;
       }
     }
 
     if (TimeStamp::Now() > aTimeout) {
       // Don't let this loop run for too long. Try it again later.
       return -EAGAIN;
     }
@@ -1706,17 +1840,27 @@ MediaCodecReader::EnsureCodecFormatParse
   size_t offset = 0;
   size_t size = 0;
   int64_t timeUs = INT64_C(0);
   uint32_t flags = 0;
   while ((status = aTrack.mCodec->dequeueOutputBuffer(&index, &offset, &size,
                      &timeUs, &flags)) != INFO_FORMAT_CHANGED) {
     if (status == OK) {
       aTrack.mCodec->releaseOutputBuffer(index);
+    } else if (status == INFO_OUTPUT_BUFFERS_CHANGED) {
+      // Update output buffers of MediaCodec.
+      if (aTrack.mCodec->getOutputBuffers(&aTrack.mOutputBuffers) != OK) {
+        NS_WARNING("Couldn't get output buffers from MediaCodec");
+        return false;
+      }
+    } else if (status != -EAGAIN && status != INVALID_OPERATION){
+      // FIXME: let INVALID_OPERATION pass?
+      return false; // something wrong!!!
     }
+
     status = FillCodecInputData(aTrack);
     if (status == INFO_FORMAT_CHANGED) {
       break;
     } else if (status != OK) {
       return false;
     }
   }
   return aTrack.mCodec->getOutputFormat(&format) == OK;
@@ -1771,32 +1915,32 @@ MediaCodecReader::onMessageReceived(cons
   }
 }
 
 // Called on Binder thread.
 void
 MediaCodecReader::codecReserved(Track& aTrack)
 {
   if (!ConfigureMediaCodec(aTrack)) {
-    DestroyMediaCodecs(aTrack);
+    DestroyMediaCodec(aTrack);
     return;
   }
 
   if (mHandler != nullptr) {
     // post kNotifyCodecReserved to MediaCodecReader::mLooper thread.
     sp<AMessage> notify = new AMessage(kNotifyCodecReserved, mHandler->id());
     notify->post();
   }
 }
 
 // Called on Binder thread.
 void
 MediaCodecReader::codecCanceled(Track& aTrack)
 {
-  DestroyMediaCodecs(aTrack);
+  DestroyMediaCodec(aTrack);
 
   if (mHandler != nullptr) {
     // post kNotifyCodecCanceled to MediaCodecReader::mLooper thread.
     sp<AMessage> notify = new AMessage(kNotifyCodecCanceled, mHandler->id());
     notify->post();
   }
 }
 
--- a/dom/media/omx/MediaCodecReader.h
+++ b/dom/media/omx/MediaCodecReader.h
@@ -10,39 +10,49 @@
 #include <utils/threads.h>
 
 #include <base/message_loop.h>
 
 #include <mozilla/CheckedInt.h>
 #include <mozilla/Mutex.h>
 #include <mozilla/Monitor.h>
 
+#include <nsDataHashtable.h>
+
 #include "MediaData.h"
 
 #include "I420ColorConverterHelper.h"
 #include "MediaCodecProxy.h"
 #include "MediaOmxCommonReader.h"
 
 namespace android {
 struct ALooper;
 struct AMessage;
 
 class MOZ_EXPORT MediaExtractor;
 class MOZ_EXPORT MetaData;
 class MOZ_EXPORT MediaBuffer;
 struct MOZ_EXPORT MediaSource;
+
+class GonkNativeWindow;
 } // namespace android
 
 namespace mozilla {
 
 class MediaTaskQueue;
 class MP3FrameParser;
 
+namespace layers {
+class TextureClient;
+} // namespace mozilla::layers
+
 class MediaCodecReader : public MediaOmxCommonReader
 {
+  typedef mozilla::layers::TextureClient TextureClient;
+
 public:
   MediaCodecReader(AbstractMediaDecoder* aDecoder);
   virtual ~MediaCodecReader();
 
   // Initializes the reader, returns NS_OK on success, or NS_ERROR_FAILURE
   // on failure.
   virtual nsresult Init(MediaDecoderReader* aCloneDonor);
 
@@ -96,30 +106,42 @@ public:
 
   virtual bool IsMediaSeekable() MOZ_OVERRIDE;
 
   virtual android::sp<android::MediaSource> GetAudioOffloadTrack();
 
 protected:
   struct TrackInputCopier
   {
+    virtual ~TrackInputCopier();
+
     virtual bool Copy(android::MediaBuffer* aSourceBuffer,
                       android::sp<android::ABuffer> aCodecBuffer);
   };
 
   struct Track
   {
-    Track();
+    enum Type
+    {
+      kUnknown = 0,
+      kAudio,
+      kVideo,
+    };
+
+    Track(Type type=kUnknown);
+
+    const Type mType;
 
     // pipeline parameters
     android::sp<android::MediaSource> mSource;
     bool mSourceIsStopped;
     android::sp<android::MediaCodecProxy> mCodec;
     android::Vector<android::sp<android::ABuffer> > mInputBuffers;
     android::Vector<android::sp<android::ABuffer> > mOutputBuffers;
+    android::sp<android::GonkNativeWindow> mNativeWindow;
 
     // pipeline copier
     nsAutoPtr<TrackInputCopier> mInputCopier;
 
     // media parameters
     Mutex mDurationLock; // mDurationUs might be read or updated from multiple
                          // threads.
     int64_t mDurationUs;
@@ -365,17 +387,17 @@ private:
 
   bool CreateMediaCodecs();
   static bool CreateMediaCodec(android::sp<android::ALooper>& aLooper,
                                Track& aTrack,
                                bool aAsync,
                                android::wp<android::MediaCodecProxy::CodecResourceListener> aListener);
   static bool ConfigureMediaCodec(Track& aTrack);
   void DestroyMediaCodecs();
-  static void DestroyMediaCodecs(Track& aTrack);
+  static void DestroyMediaCodec(Track& aTrack);
 
   bool CreateTaskQueues();
   void ShutdownTaskQueues();
   bool DecodeVideoFrameTask(int64_t aTimeThreshold);
   bool DecodeVideoFrameSync(int64_t aTimeThreshold);
   bool DecodeAudioDataTask();
   bool DecodeAudioDataSync();
   void DispatchVideoTask(int64_t aTimeThreshold);
@@ -391,39 +413,55 @@ private:
   }
 
   bool TriggerIncrementalParser();
 
   bool UpdateDuration();
   bool UpdateAudioInfo();
   bool UpdateVideoInfo();
 
-  static android::status_t FlushCodecData(Track& aTrack);
-  static android::status_t FillCodecInputData(Track& aTrack);
-  static android::status_t GetCodecOutputData(Track& aTrack,
-                                              CodecBufferInfo& aBuffer,
-                                              int64_t aThreshold,
-                                              const TimeStamp& aTimeout);
-  static bool EnsureCodecFormatParsed(Track& aTrack);
+  android::status_t FlushCodecData(Track& aTrack);
+  android::status_t FillCodecInputData(Track& aTrack);
+  android::status_t GetCodecOutputData(Track& aTrack,
+                                       CodecBufferInfo& aBuffer,
+                                       int64_t aThreshold,
+                                       const TimeStamp& aTimeout);
+  bool EnsureCodecFormatParsed(Track& aTrack);
 
   uint8_t* GetColorConverterBuffer(int32_t aWidth, int32_t aHeight);
   void ClearColorConverterBuffer();
 
   int64_t ProcessCachedData(int64_t aOffset,
                             nsRefPtr<SignalObject> aSignal);
   bool ParseDataSegment(const char* aBuffer,
                         uint32_t aLength,
                         int64_t aOffset);
 
+  static void TextureClientRecycleCallback(TextureClient* aClient,
+                                           void* aClosure);
+  void TextureClientRecycleCallback(TextureClient* aClient);
+
+  void ReleaseRecycledTextureClients();
+  static PLDHashOperator ReleaseTextureClient(TextureClient* aClient,
+                                              size_t& aIndex,
+                                              void* aUserArg);
+  PLDHashOperator ReleaseTextureClient(TextureClient* aClient,
+                                       size_t& aIndex);
+
+  void ReleaseAllTextureClients();
+
   android::sp<MessageHandler> mHandler;
   android::sp<VideoResourceListener> mVideoListener;
 
   android::sp<android::ALooper> mLooper;
   android::sp<android::MetaData> mMetaData;
 
+  Mutex mTextureClientIndexesLock;
+  nsDataHashtable<nsPtrHashKey<TextureClient>, size_t> mTextureClientIndexes;
+
   // media tracks
   AudioTrack mAudioTrack;
   VideoTrack mVideoTrack;
   AudioTrack mAudioOffloadTrack; // only Track::mSource is valid
 
   // color converter
   android::I420ColorConverterHelper mColorConverter;
   nsAutoArrayPtr<uint8_t> mColorConverterBuffer;