author | John Lin <jolin@mozilla.com> |
date | Thu, 29 Oct 2015 21:14:00 +0100 |
changeset 270685 | 9fc683f25a53332e23b4678f0a96710520efc159 |
parent 270684 | 294042940ba93533d4b9ed66c97bb834bd1dde49 |
child 270686 | 628b84e68ac708e33010a9b22e5a680010c230d3 |
push id | 67432 |
push user | cbook@mozilla.com |
push date | Mon, 02 Nov 2015 09:31:12 +0000 |
treeherder | mozilla-inbound@628b84e68ac7 |
reviewers | sotaro |
bugs | 1199809 |
milestone | 45.0a1 |
first release with | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64 |
last release without | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64 |
--- a/dom/media/platforms/gonk/GonkAudioDecoderManager.cpp
+++ b/dom/media/platforms/gonk/GonkAudioDecoderManager.cpp
@@ -150,49 +150,30 @@ GonkAudioDecoderManager::CreateAudioData
                               frames,
                               mAudioChannels,
                               OmxCopy(data+dataOffset,
                                       size,
                                       mAudioChannels));
   return NS_OK;
 }
 
-class AutoReleaseAudioBuffer
-{
-public:
-  AutoReleaseAudioBuffer(MediaBuffer* aBuffer, MediaCodecProxy* aCodecProxy)
-    : mAudioBuffer(aBuffer)
-    , mCodecProxy(aCodecProxy)
-  {}
-
-  ~AutoReleaseAudioBuffer()
-  {
-    if (mAudioBuffer) {
-      mCodecProxy->ReleaseMediaBuffer(mAudioBuffer);
-    }
-  }
-private:
-  MediaBuffer* mAudioBuffer;
-  sp<MediaCodecProxy> mCodecProxy;
-};
-
 nsresult
 GonkAudioDecoderManager::Output(int64_t aStreamOffset,
                                 RefPtr<MediaData>& aOutData)
 {
   aOutData = nullptr;
   if (mAudioQueue.GetSize() > 0) {
     aOutData = mAudioQueue.PopFront();
     return mAudioQueue.AtEndOfStream() ?
       NS_ERROR_ABORT : NS_OK;
   }
 
   status_t err;
   MediaBuffer* audioBuffer = nullptr;
   err = mDecoder->Output(&audioBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
-  AutoReleaseAudioBuffer a(audioBuffer, mDecoder.get());
+  AutoReleaseMediaBuffer a(audioBuffer, mDecoder.get());
 
   switch (err) {
     case OK: {
       nsresult rv = CreateAudioData(audioBuffer, aStreamOffset);
       NS_ENSURE_SUCCESS(rv, rv);
       break;
     }
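The audio path now relies on the shared guard for every exit from Output(): CreateAudioData() copies the PCM out of the decoder buffer (see the OmxCopy call in the context above), so the buffer can always go back to the codec when the function unwinds. A minimal sketch of that flow, assuming the Gonk/stagefright headers and the READ_OUTPUT_BUFFER_TIMEOUT_US constant from these files; the wrapper function name is hypothetical and error handling is simplified:

// Sketch only (not part of the patch): every return path hands the buffer
// back to the MediaCodecProxy via the guard's destructor.
static nsresult
PullOneAudioBuffer(android::sp<android::MediaCodecProxy>& aDecoder)
{
  android::MediaBuffer* audioBuffer = nullptr;
  status_t err = aDecoder->Output(&audioBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
  // Take ownership immediately, before any branch can return early.
  AutoReleaseMediaBuffer autoRelease(audioBuffer, aDecoder.get());

  if (err != android::OK) {
    return NS_ERROR_UNEXPECTED;   // buffer (if any) is released here
  }
  // ... copy the PCM samples out of |audioBuffer| into an AudioData ...
  return NS_OK;                   // and released here as well
}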
--- a/dom/media/platforms/gonk/GonkMediaDataDecoder.h
+++ b/dom/media/platforms/gonk/GonkMediaDataDecoder.h
@@ -6,16 +6,17 @@
 #if !defined(GonkMediaDataDecoder_h_)
 #define GonkMediaDataDecoder_h_
 
 #include "PlatformDecoderModule.h"
 #include <stagefright/foundation/AHandler.h>
 
 namespace android {
 struct ALooper;
+class MediaBuffer;
 class MediaCodecProxy;
 } // namespace android
 
 namespace mozilla {
 class MediaRawData;
 
 // Manage the data flow from inputting encoded data and outputting decode data.
 class GonkDecoderManager : public android::AHandler {
@@ -116,16 +117,44 @@ protected:
   android::sp<android::AMessage> mToDo;
 
   // Stores the offset of every output that needs to be read from mDecoder.
   nsTArray<int64_t> mWaitOutput;
 
   MediaDataDecoderCallback* mDecodeCallback; // Reports decoder output or error.
 };
 
+class AutoReleaseMediaBuffer
+{
+public:
+  AutoReleaseMediaBuffer(android::MediaBuffer* aBuffer, android::MediaCodecProxy* aCodec)
+    : mBuffer(aBuffer)
+    , mCodec(aCodec)
+  {}
+
+  ~AutoReleaseMediaBuffer()
+  {
+    MOZ_ASSERT(mCodec.get());
+    if (mBuffer) {
+      mCodec->ReleaseMediaBuffer(mBuffer);
+    }
+  }
+
+  android::MediaBuffer* forget()
+  {
+    android::MediaBuffer* tmp = mBuffer;
+    mBuffer = nullptr;
+    return tmp;
+  }
+
+private:
+  android::MediaBuffer* mBuffer;
+  android::sp<android::MediaCodecProxy> mCodec;
+};
+
 // Samples are decoded using the GonkDecoder (MediaCodec)
 // created by the GonkDecoderManager. This class implements
 // the higher-level logic that drives mapping the Gonk to the async
 // MediaDataDecoder interface. The specifics of decoding the exact stream
 // type are handled by GonkDecoderManager and the GonkDecoder it creates.
 class GonkMediaDataDecoder : public MediaDataDecoder {
 public:
   GonkMediaDataDecoder(GonkDecoderManager* aDecoderManager,
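The new helper mirrors the AutoReleaseAudioBuffer class removed from the audio manager, with two differences worth noting: it holds the codec as an android::sp so the proxy cannot go away while a release is still pending, and it adds forget() so a caller can cancel the automatic release once something else has taken over the buffer. A minimal usage sketch, assuming the Gonk/stagefright headers; the function name and the hand-off scenario are hypothetical:

// Sketch only: forget() detaches the buffer, turning the destructor into a no-op.
static void
HandOffOrRelease(android::MediaBuffer* aBuffer,
                 android::MediaCodecProxy* aCodec,
                 bool aConsumerWantsIt)
{
  AutoReleaseMediaBuffer autoRelease(aBuffer, aCodec);

  if (!aConsumerWantsIt) {
    return;  // ~AutoReleaseMediaBuffer() returns aBuffer to aCodec
  }

  // Whoever receives the raw pointer is now responsible for calling
  // aCodec->ReleaseMediaBuffer() later; the guard must let go of it.
  android::MediaBuffer* detached = autoRelease.forget();
  // ... pass |detached| to the consumer (e.g. a TextureClient recycle path) ...
  (void)detached;
}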
--- a/dom/media/platforms/gonk/GonkVideoDecoderManager.cpp
+++ b/dom/media/platforms/gonk/GonkVideoDecoderManager.cpp
@@ -132,49 +132,50 @@ GonkVideoDecoderManager::Init()
   }
   mDecoder->AsyncAllocateVideoMediaCodec();
 
   return p;
 }
 
 nsresult
-GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
+GonkVideoDecoderManager::CreateVideoData(MediaBuffer* aBuffer,
+                                         int64_t aStreamOffset,
+                                         VideoData **v)
 {
   *v = nullptr;
   RefPtr<VideoData> data;
   int64_t timeUs;
   int32_t keyFrame;
 
-  if (mVideoBuffer == nullptr) {
+  if (aBuffer == nullptr) {
     GVDM_LOG("Video Buffer is not valid!");
     return NS_ERROR_UNEXPECTED;
   }
 
-  if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
-    ReleaseVideoBuffer();
+  AutoReleaseMediaBuffer autoRelease(aBuffer, mDecoder.get());
+
+  if (!aBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
     GVDM_LOG("Decoder did not return frame time");
     return NS_ERROR_UNEXPECTED;
   }
 
   if (mLastTime > timeUs) {
-    ReleaseVideoBuffer();
     GVDM_LOG("Output decoded sample time is revert. time=%lld", timeUs);
     return NS_ERROR_NOT_AVAILABLE;
   }
   mLastTime = timeUs;
 
-  if (mVideoBuffer->range_length() == 0) {
+  if (aBuffer->range_length() == 0) {
     // Some decoders may return spurious empty buffers that we just want to ignore
     // quoted from Android's AwesomePlayer.cpp
-    ReleaseVideoBuffer();
     return NS_ERROR_NOT_AVAILABLE;
   }
 
-  if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
+  if (!aBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
     keyFrame = 0;
   }
 
   gfx::IntRect picture = mPicture;
   if (mFrameInfo.mWidth != mInitialFrame.width ||
       mFrameInfo.mHeight != mInitialFrame.height) {
     // Frame size is different from what the container reports. This is legal,
@@ -183,55 +184,55 @@ GonkVideoDecoderManager::CreateVideoData
     picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
     picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
     picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
     picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
   }
 
   RefPtr<mozilla::layers::TextureClient> textureClient;
 
-  if ((mVideoBuffer->graphicBuffer().get())) {
-    textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
+  if ((aBuffer->graphicBuffer().get())) {
+    textureClient = mNativeWindow->getTextureClientFromBuffer(aBuffer->graphicBuffer().get());
   }
 
   if (textureClient) {
     GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
-    grallocClient->SetMediaBuffer(mVideoBuffer);
+    grallocClient->SetMediaBuffer(aBuffer);
     textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
+    autoRelease.forget(); // RecycleCallback will return it back to decoder.
 
     data = VideoData::Create(mInfo.mVideo,
                              mImageContainer,
                              aStreamOffset,
                              timeUs,
                              1, // No way to pass sample duration from muxer to
                                 // OMX codec, so we hardcode the duration here.
                              textureClient,
                              keyFrame,
                              -1,
                              picture);
   } else {
-    if (!mVideoBuffer->data()) {
+    if (!aBuffer->data()) {
      GVDM_LOG("No data in Video Buffer!");
      return NS_ERROR_UNEXPECTED;
    }
-    uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
+    uint8_t *yuv420p_buffer = (uint8_t *)aBuffer->data();
     int32_t stride = mFrameInfo.mStride;
     int32_t slice_height = mFrameInfo.mSliceHeight;
 
     // Converts to OMX_COLOR_FormatYUV420Planar
     if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
       ARect crop;
       crop.top = 0;
       crop.bottom = mFrameInfo.mHeight;
       crop.left = 0;
       crop.right = mFrameInfo.mWidth;
       yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
-      if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
+      if (mColorConverter.convertDecoderOutputToI420(aBuffer->data(),
           mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
-        ReleaseVideoBuffer();
         GVDM_LOG("Color conversion failed!");
         return NS_ERROR_UNEXPECTED;
       }
       stride = mFrameInfo.mWidth;
       slice_height = mFrameInfo.mHeight;
     }
 
     size_t yuv420p_y_size = stride * slice_height;
@@ -270,17 +271,16 @@ GonkVideoDecoderManager::CreateVideoData
                              mImageContainer,
                              pos,
                              timeUs,
                              1, // We don't know the duration.
                              b,
                              keyFrame,
                              -1,
                              picture);
-    ReleaseVideoBuffer();
   }
 
   data.forget(v);
   return NS_OK;
 }
 
 bool
 GonkVideoDecoderManager::SetVideoFormat()
@@ -331,23 +331,24 @@ GonkVideoDecoderManager::Output(int64_t
                                 RefPtr<MediaData>& aOutData)
 {
   aOutData = nullptr;
   status_t err;
   if (mDecoder == nullptr) {
     GVDM_LOG("Decoder is not inited");
     return NS_ERROR_UNEXPECTED;
   }
-  err = mDecoder->Output(&mVideoBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
+  MediaBuffer* outputBuffer;
+  err = mDecoder->Output(&outputBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
 
   switch (err) {
     case OK: {
       RefPtr<VideoData> data;
-      nsresult rv = CreateVideoData(aStreamOffset, getter_AddRefs(data));
+      nsresult rv = CreateVideoData(outputBuffer, aStreamOffset, getter_AddRefs(data));
       if (rv == NS_ERROR_NOT_AVAILABLE) {
         // Decoder outputs a empty video buffer, try again
         return NS_ERROR_NOT_AVAILABLE;
       } else if (rv != NS_OK || data == nullptr) {
         GVDM_LOG("Failed to create VideoData");
         return NS_ERROR_UNEXPECTED;
       }
       aOutData = data;
@@ -374,17 +375,17 @@ GonkVideoDecoderManager::Output(int64_t
     {
       // GVDM_LOG("Need to try again!");
       return NS_ERROR_NOT_AVAILABLE;
     }
     case android::ERROR_END_OF_STREAM:
     {
       GVDM_LOG("Got the EOS frame!");
       RefPtr<VideoData> data;
-      nsresult rv = CreateVideoData(aStreamOffset, getter_AddRefs(data));
+      nsresult rv = CreateVideoData(outputBuffer, aStreamOffset, getter_AddRefs(data));
       if (rv == NS_ERROR_NOT_AVAILABLE) {
         // For EOS, no need to do any thing.
         return NS_ERROR_ABORT;
       }
       if (rv != NS_OK || data == nullptr) {
         GVDM_LOG("Failed to create video data");
         return NS_ERROR_UNEXPECTED;
       }
@@ -401,23 +402,16 @@ GonkVideoDecoderManager::Output(int64_t
       GVDM_LOG("Decoder failed, err=%d", err);
       return NS_ERROR_UNEXPECTED;
     }
   }
 
   return NS_OK;
 }
 
-void GonkVideoDecoderManager::ReleaseVideoBuffer() {
-  if (mVideoBuffer) {
-    mDecoder->ReleaseMediaBuffer(mVideoBuffer);
-    mVideoBuffer = nullptr;
-  }
-}
-
 void
 GonkVideoDecoderManager::codecReserved()
 {
   if (mInitPromise.IsEmpty()) {
     return;
   }
   GVDM_LOG("codecReserved");
   sp<AMessage> format = new AMessage;
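The gralloc branch above is the one place where the buffer must outlive CreateVideoData(): the TextureClient keeps the MediaBuffer until the compositor is done with the frame, and RecycleCallback then returns it to the decoder, which is why autoRelease.forget() is called only on that path. A condensed sketch of the two outcomes, assuming the same Gonk types; the function and parameter names are hypothetical:

// Sketch only: how ownership of a decoded video buffer is resolved after this change.
static void
ResolveVideoBufferOwnership(android::MediaBuffer* aBuffer,
                            android::MediaCodecProxy* aCodec,
                            bool aHasTextureClient)
{
  AutoReleaseMediaBuffer autoRelease(aBuffer, aCodec);

  if (aHasTextureClient) {
    // Zero-copy path: the TextureClient holds the gralloc-backed MediaBuffer;
    // RecycleCallback will hand it back to the decoder later, so the guard
    // must not release it now.
    autoRelease.forget();
    return;
  }

  // Copy path: the YUV planes were already copied (and possibly color
  // converted) into a VideoData, so the guard releases aBuffer on scope exit.
}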
--- a/dom/media/platforms/gonk/GonkVideoDecoderManager.h
+++ b/dom/media/platforms/gonk/GonkVideoDecoderManager.h
@@ -88,18 +88,17 @@ private:
     VideoResourceListener(const VideoResourceListener &rhs) = delete;
     const VideoResourceListener &operator=(const VideoResourceListener &rhs) = delete;
 
     MozPromiseHolder<MediaResourcePromise> mVideoCodecPromise;
   };
 
   bool SetVideoFormat();
 
-  nsresult CreateVideoData(int64_t aStreamOffset, VideoData** aOutData);
-  void ReleaseVideoBuffer();
+  nsresult CreateVideoData(MediaBuffer* aBuffer, int64_t aStreamOffset, VideoData** aOutData);
 
   uint8_t* GetColorConverterBuffer(int32_t aWidth, int32_t aHeight);
 
   // For codec resource management
   void codecReserved();
   void codecCanceled();
   void ReleaseAllPendingVideoBuffers();
   void PostReleaseVideoBuffer(android::MediaBuffer *aBuffer,
@@ -109,18 +108,16 @@ private:
   uint32_t mVideoHeight;
   uint32_t mDisplayWidth;
   uint32_t mDisplayHeight;
   nsIntRect mPicture;
   nsIntSize mInitialFrame;
 
   RefPtr<layers::ImageContainer> mImageContainer;
 
-  android::MediaBuffer* mVideoBuffer;
-
   MediaInfo mInfo;
   android::sp<VideoResourceListener> mVideoListener;
   MozPromiseRequestHolder<MediaResourcePromise> mVideoCodecRequest;
   FrameInfo mFrameInfo;
 
   // color converter
   android::I420ColorConverterHelper mColorConverter;
   nsAutoArrayPtr<uint8_t> mColorConverterBuffer;