Bug 911046 - Get graphic buffers of decoded frames through gonk native window callback. r=jesup
author: John Lin <jolin@mozilla.com>
date: Sun, 27 Apr 2014 21:07:00 -0400
changeset: 180943 1447c1d13eb0c0a5b127ca32dd800ac6eff64ce4
parent: 180942 c4bb530fc9db1a07dca2b423f89d93d46d6d29d9
child: 180944 1201268ce09031a7fb284e9c6c171a8712b9e2d4
push id: 272
push user: pvanderbeken@mozilla.com
push date: Mon, 05 May 2014 16:31:18 +0000
reviewers: jesup
bugs: 911046
milestone: 31.0a1
Bug 911046 - Get graphic buffers of decoded frames through gonk native window callback. r=jesup
media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
--- a/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
@@ -21,17 +21,16 @@
 using namespace android;
 
 // WebRTC
 #include "common_video/interface/texture_video_frame.h"
 #include "video_engine/include/vie_external_codec.h"
 
 // Gecko
 #include "GonkNativeWindow.h"
-#include "GonkNativeWindowClient.h"
 #include "mozilla/Atomics.h"
 #include "mozilla/Mutex.h"
 #include "nsThreadUtils.h"
 #include "OMXCodecWrapper.h"
 #include "TextureClient.h"
 
 #define DEQUEUE_BUFFER_TIMEOUT_US (100 * 1000ll) // 100ms.
 #define START_DEQUEUE_BUFFER_TIMEOUT_US (10 * DEQUEUE_BUFFER_TIMEOUT_US) // 1s.
@@ -85,39 +84,16 @@ public:
   {
     return DummyRefCountBase::Release();
   }
 
 private:
   RefPtr<layers::Image> mImage;
 };
 
-// Graphic buffer lifecycle management.
-// Return buffer to OMX codec when renderer is done with it.
-class RecycleCallback
-{
-public:
-  RecycleCallback(const sp<MediaCodec>& aOmx, uint32_t aBufferIndex)
-    : mOmx(aOmx)
-    , mBufferIndex(aBufferIndex)
-  {}
-  typedef void* CallbackPtr;
-  static void ReturnOMXBuffer(layers::TextureClient* aClient, CallbackPtr aClosure)
-  {
-    aClient->ClearRecycleCallback();
-    RecycleCallback* self = static_cast<RecycleCallback*>(aClosure);
-    self->mOmx->releaseOutputBuffer(self->mBufferIndex);
-    delete self;
-  }
-
-private:
-  sp<MediaCodec> mOmx;
-  uint32_t mBufferIndex;
-};
-
 struct EncodedFrame
 {
   uint32_t mWidth;
   uint32_t mHeight;
   uint32_t mTimestamp;
   int64_t mRenderTimeMs;
 };
 
@@ -224,24 +200,29 @@ private:
   // wait/notify queued input.
   Monitor mMonitor;
   nsCOMPtr<nsIThread> mThread;
   std::queue<EncodedFrame> mInputFrames;
   bool mEnding;
 };
 
 // H.264 decoder using stagefright.
-class WebrtcOMXDecoder MOZ_FINAL
+// It implements gonk native window callback to receive buffers from
+// MediaCodec::RenderOutputBufferAndRelease().
+class WebrtcOMXDecoder MOZ_FINAL : public GonkNativeWindowNewFrameCallback
 {
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcOMXDecoder)
 public:
-  WebrtcOMXDecoder(const char* aMimeType)
+  WebrtcOMXDecoder(const char* aMimeType,
+                   webrtc::DecodedImageCallback* aCallback)
     : mWidth(0)
     , mHeight(0)
     , mStarted(false)
+    , mDecodedFrameLock("WebRTC decoded frame lock")
+    , mCallback(aCallback)
   {
     // Create binder thread pool required by stagefright.
     android::ProcessState::self()->startThreadPool();
 
     mLooper = new ALooper;
     mLooper->start();
     mCodec = MediaCodec::CreateByType(mLooper, aMimeType, false /* encoder */);
   }
@@ -285,31 +266,30 @@ public:
     config->setInt32("width", width);
     config->setInt32("height", height);
     mWidth = width;
     mHeight = height;
 
     sp<Surface> surface = nullptr;
     mNativeWindow = new GonkNativeWindow();
     if (mNativeWindow.get()) {
-      mNativeWindowClient = new GonkNativeWindowClient(mNativeWindow->getBufferQueue());
-      if (mNativeWindowClient.get()) {
-        surface = new Surface(mNativeWindowClient->getIGraphicBufferProducer());
-      }
+      // Listen to buffers queued by MediaCodec::RenderOutputBufferAndRelease().
+      mNativeWindow->setNewFrameCallback(this);
+      surface = new Surface(mNativeWindow->getBufferQueue());
     }
     status_t result = mCodec->configure(config, surface, nullptr, 0);
     if (result == OK) {
       result = Start();
     }
     return result;
   }
 
   status_t
   FillInput(const webrtc::EncodedImage& aEncoded, bool aIsFirstFrame,
-            int64_t& aRenderTimeMs, webrtc::DecodedImageCallback* aCallback)
+            int64_t& aRenderTimeMs)
   {
     MOZ_ASSERT(mCodec != nullptr);
     if (mCodec == nullptr) {
       return INVALID_OPERATION;
     }
 
     size_t index;
     status_t err = mCodec->dequeueInputBuffer(&index,
@@ -330,32 +310,32 @@ public:
     omxIn->setRange(0, size);
     // Copying is needed because MediaCodec API doesn't support externallay
     // allocated buffer as input.
     memcpy(omxIn->data(), aEncoded._buffer, size);
     int64_t inputTimeUs = aEncoded._timeStamp * 1000 / 90; // 90kHz -> us.
     err = mCodec->queueInputBuffer(index, 0, size, inputTimeUs, flags);
     if (err == OK && !(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
       if (mOutputDrain == nullptr) {
-        mOutputDrain = new OutputDrain(this, aCallback);
+        mOutputDrain = new OutputDrain(this);
         mOutputDrain->Start();
       }
       EncodedFrame frame;
       frame.mWidth = mWidth;
       frame.mHeight = mHeight;
       frame.mTimestamp = aEncoded._timeStamp;
       frame.mRenderTimeMs = aRenderTimeMs;
       mOutputDrain->QueueInput(frame);
     }
 
     return err;
   }
 
   status_t
-  DrainOutput(const EncodedFrame& aFrame, webrtc::DecodedImageCallback* aCallback)
+  DrainOutput(const EncodedFrame& aFrame)
   {
     MOZ_ASSERT(mCodec != nullptr);
     if (mCodec == nullptr) {
       return INVALID_OPERATION;
     }
 
     size_t index = 0;
     size_t outOffset = 0;
@@ -385,49 +365,91 @@ public:
         MOZ_ASSERT(err == OK);
         return INFO_OUTPUT_BUFFERS_CHANGED;
       default:
         CODEC_LOGE("decode dequeue OMX output buffer error:%d", err);
         // Return OK to instruct OutputDrain to drop input from queue.
         return OK;
     }
 
-    sp<ABuffer> omxOut = mOutputBuffers.itemAt(index);
-    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(GenerateVideoFrame(aFrame,
-                                                                    index,
-                                                                    omxOut));
-    if (videoFrame == nullptr) {
+    if (mCallback) {
+      {
+        // Store info of this frame. OnNewFrame() will need the timestamp later.
+        MutexAutoLock lock(mDecodedFrameLock);
+        mDecodedFrames.push(aFrame);
+      }
+      // Ask codec to queue buffer back to native window. OnNewFrame() will be
+      // called.
+      mCodec->renderOutputBufferAndRelease(index);
+      // Once consumed, buffer will be queued back to GonkNativeWindow for codec
+      // to dequeue/use.
+    } else {
       mCodec->releaseOutputBuffer(index);
-    } else if (aCallback) {
-      aCallback->Decoded(*videoFrame);
-      // OMX buffer will be released by RecycleCallback after rendered.
     }
 
     return err;
   }
 
+  // Will be called when MediaCodec::RenderOutputBufferAndRelease() returns
+  // buffers back to native window for rendering.
+  void OnNewFrame() MOZ_OVERRIDE
+  {
+    RefPtr<layers::TextureClient> buffer = mNativeWindow->getCurrentBuffer();
+    MOZ_ASSERT(buffer != nullptr);
+
+    layers::GrallocImage::GrallocData grallocData;
+    grallocData.mPicSize = buffer->GetSize();
+    grallocData.mGraphicBuffer = buffer;
+
+    nsAutoPtr<layers::GrallocImage> grallocImage(new layers::GrallocImage());
+    grallocImage->SetData(grallocData);
+
+    // Get timestamp of the frame about to render.
+    int64_t timestamp = -1;
+    int64_t renderTimeMs = -1;
+    {
+      MutexAutoLock lock(mDecodedFrameLock);
+      if (mDecodedFrames.empty()) {
+        return;
+      }
+      EncodedFrame decoded = mDecodedFrames.front();
+      timestamp = decoded.mTimestamp;
+      renderTimeMs = decoded.mRenderTimeMs;
+      mDecodedFrames.pop();
+    }
+    MOZ_ASSERT(timestamp >= 0 && renderTimeMs >= 0);
+
+    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(
+      new webrtc::TextureVideoFrame(new ImageNativeHandle(grallocImage.forget()),
+                                    grallocData.mPicSize.width,
+                                    grallocData.mPicSize.height,
+                                    timestamp,
+                                    renderTimeMs));
+    if (videoFrame != nullptr) {
+      mCallback->Decoded(*videoFrame);
+    }
+  }
+
 private:
   class OutputDrain : public OMXOutputDrain
   {
   public:
-    OutputDrain(WebrtcOMXDecoder* aOMX, webrtc::DecodedImageCallback* aCallback)
+    OutputDrain(WebrtcOMXDecoder* aOMX)
       : OMXOutputDrain()
       , mOMX(aOMX)
-      , mCallback(aCallback)
     {}
 
   protected:
     virtual bool DrainOutput(const EncodedFrame& aFrame) MOZ_OVERRIDE
     {
-      return (mOMX->DrainOutput(aFrame, mCallback) == OK);
+      return (mOMX->DrainOutput(aFrame) == OK);
     }
 
   private:
     WebrtcOMXDecoder* mOMX;
-    webrtc::DecodedImageCallback* mCallback;
   };
 
   status_t Start()
   {
     MOZ_ASSERT(!mStarted);
     if (mStarted) {
       return OK;
     }
@@ -443,16 +465,25 @@ private:
   }
 
   status_t Stop()
   {
     MOZ_ASSERT(mStarted);
     if (!mStarted) {
       return OK;
     }
+
+    // Drop all 'pending to render' frames.
+    {
+      MutexAutoLock lock(mDecodedFrameLock);
+      while (!mDecodedFrames.empty()) {
+        mDecodedFrames.pop();
+      }
+    }
+
     if (mOutputDrain != nullptr) {
       mOutputDrain->Stop();
       mOutputDrain = nullptr;
     }
 
     status_t err = mCodec->stop();
     if (err == OK) {
       mInputBuffers.clear();
@@ -460,71 +491,31 @@ private:
       mStarted = false;
     } else {
       MOZ_ASSERT(false);
     }
 
     return err;
   }
 
-  webrtc::I420VideoFrame*
-  GenerateVideoFrame(const EncodedFrame& aEncoded, uint32_t aBufferIndex,
-                     const sp<ABuffer>& aOMXBuffer)
-  {
-    // TODO: Get decoded frame buffer through native window to obsolete
-    //       changes to stagefright code.
-    sp<RefBase> obj;
-    bool hasGraphicBuffer = aOMXBuffer->meta()->findObject("graphic-buffer", &obj);
-    if (!hasGraphicBuffer) {
-      MOZ_ASSERT(false, "Decoder doesn't produce graphic buffer");
-      // Nothing to render.
-      return nullptr;
-    }
-
-    sp<GraphicBuffer> gb = static_cast<GraphicBuffer*>(obj.get());
-    if (!gb.get()) {
-      MOZ_ASSERT(false, "Null graphic buffer");
-      return nullptr;
-    }
-
-    RefPtr<mozilla::layers::TextureClient> textureClient =
-      mNativeWindow->getTextureClientFromBuffer(gb.get());
-    textureClient->SetRecycleCallback(RecycleCallback::ReturnOMXBuffer,
-                                      new RecycleCallback(mCodec, aBufferIndex));
-
-    int width = gb->getWidth();
-    int height = gb->getHeight();
-    layers::GrallocImage::GrallocData grallocData;
-    grallocData.mPicSize = gfx::IntSize(width, height);
-    grallocData.mGraphicBuffer = textureClient;
-
-    layers::GrallocImage* grallocImage = new layers::GrallocImage();
-    grallocImage->SetData(grallocData);
-
-    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(
-      new webrtc::TextureVideoFrame(new ImageNativeHandle(grallocImage),
-                                    width, height,
-                                    aEncoded.mTimestamp,
-                                    aEncoded.mRenderTimeMs));
-
-    return videoFrame.forget();
-  }
-
   sp<ALooper> mLooper;
   sp<MediaCodec> mCodec; // OMXCodec
   int mWidth;
   int mHeight;
   android::Vector<sp<ABuffer> > mInputBuffers;
   android::Vector<sp<ABuffer> > mOutputBuffers;
   bool mStarted;
 
   sp<GonkNativeWindow> mNativeWindow;
-  sp<GonkNativeWindowClient> mNativeWindowClient;
 
   RefPtr<OutputDrain> mOutputDrain;
+  webrtc::DecodedImageCallback* mCallback;
+
+  Mutex mDecodedFrameLock; // To protect mDecodedFrames.
+  std::queue<EncodedFrame> mDecodedFrames;
 };
 
 class EncOutputDrain : public OMXOutputDrain
 {
 public:
   EncOutputDrain(OMXVideoEncoder* aOMX, webrtc::EncodedImageCallback* aCallback)
     : OMXOutputDrain()
     , mOMX(aOMX)
@@ -815,29 +806,30 @@ WebrtcOMXH264VideoDecoder::Decode(const 
   if (!configured) {
     // Search for SPS/PPS NALUs in input to get decoder config.
     sp<ABuffer> input = new ABuffer(aInputImage._buffer, aInputImage._length);
     sp<MetaData> paramSets = WebrtcOMXDecoder::ParseParamSets(input);
     if (NS_WARN_IF(paramSets == nullptr)) {
       // Cannot config decoder because SPS/PPS NALUs haven't been seen.
       return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
     }
-    RefPtr<WebrtcOMXDecoder> omx = new WebrtcOMXDecoder(MEDIA_MIMETYPE_VIDEO_AVC);
+    RefPtr<WebrtcOMXDecoder> omx = new WebrtcOMXDecoder(MEDIA_MIMETYPE_VIDEO_AVC,
+                                                        mCallback);
     status_t result = omx->ConfigureWithParamSets(paramSets);
     if (NS_WARN_IF(result != OK)) {
       return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
     }
     CODEC_LOGD("WebrtcOMXH264VideoDecoder:%p start OMX", this);
     mOMX = omx;
   }
 
   bool feedFrame = true;
   while (feedFrame) {
     int64_t timeUs;
-    status_t err = mOMX->FillInput(aInputImage, !configured, aRenderTimeMs, mCallback);
+    status_t err = mOMX->FillInput(aInputImage, !configured, aRenderTimeMs);
     feedFrame = (err == -EAGAIN); // No input buffer available. Try again.
   }
 
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
 int32_t
 WebrtcOMXH264VideoDecoder::RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* aCallback)