author | David Anderson <danderson@mozilla.com> |
date | Tue, 17 Nov 2015 00:09:01 -0800 |
changeset 272929 | 85640303b72080bab9979100bfdd0d98baa90679 |
parent 272928 | 57c5d2b88d266b078978a2dc0e956958fee98ac6 |
child 272930 | 1274f88a8e351dca54507b47b38b25e79d87b069 |
push id | 29688 |
push user | kwierso@gmail.com |
push date | Tue, 17 Nov 2015 21:10:09 +0000 |
treeherder | mozilla-central@eed903a7e4e7 |
reviewers | sotaro |
bugs | 1222910 |
milestone | 45.0a1 |
--- a/dom/camera/GonkCameraControl.cpp
+++ b/dom/camera/GonkCameraControl.cpp
@@ -2383,25 +2383,21 @@ nsGonkCameraControl::OnPoster(void* aDat
   }
   CameraControlImpl::OnPoster(blobImpl);
 }
 
 void
 nsGonkCameraControl::OnNewPreviewFrame(layers::TextureClient* aBuffer)
 {
 #ifdef MOZ_WIDGET_GONK
-  RefPtr<Image> frame = mImageContainer->CreateImage(ImageFormat::GRALLOC_PLANAR_YCBCR);
-
-  GrallocImage* videoImage = static_cast<GrallocImage*>(frame.get());
+  RefPtr<GrallocImage> frame = new GrallocImage();
 
-  GrallocImage::GrallocData data;
-  data.mGraphicBuffer = aBuffer;
-  data.mPicSize = IntSize(mCurrentConfiguration.mPreviewSize.width,
-                          mCurrentConfiguration.mPreviewSize.height);
-  videoImage->SetData(data);
+  IntSize picSize(mCurrentConfiguration.mPreviewSize.width,
+                  mCurrentConfiguration.mPreviewSize.height);
+  frame->SetData(aBuffer, picSize);
 
   if (mCapturePoster.exchange(false)) {
     CreatePoster(frame,
                  mCurrentConfiguration.mPreviewSize.width,
                  mCurrentConfiguration.mPreviewSize.height,
                  mVideoRotation);
     return;
   }
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -1039,20 +1039,19 @@ DOMAudioNodeMediaStream::CreateTrackUnio
   stream->InitTrackUnionStream(aWindow, aGraph);
   return stream.forget();
 }
 
 DOMHwMediaStream::DOMHwMediaStream()
 {
 #ifdef MOZ_WIDGET_GONK
   mImageContainer = LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS_OVERLAY);
 
-  RefPtr<Image> img = mImageContainer->CreateImage(ImageFormat::OVERLAY_IMAGE);
-  mOverlayImage = static_cast<layers::OverlayImage*>(img.get());
+  mOverlayImage = mImageContainer->CreateOverlayImage();
   nsAutoTArray<ImageContainer::NonOwningImage,1> images;
-  images.AppendElement(ImageContainer::NonOwningImage(img));
+  images.AppendElement(ImageContainer::NonOwningImage(mOverlayImage));
   mImageContainer->SetCurrentImages(images);
 #endif
 }
 
 DOMHwMediaStream::~DOMHwMediaStream()
 {
 }
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -307,17 +307,17 @@ VideoData::Create(const VideoInfo& aInfo
   const YCbCrBuffer::Plane &Cr = aBuffer.mPlanes[2];
 #endif
 
   if (!aImage) {
     // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
     // format.
 #ifdef MOZ_WIDGET_GONK
     if (IsYV12Format(Y, Cb, Cr) && !IsInEmulator()) {
-      v->mImage = aContainer->CreateImage(ImageFormat::GRALLOC_PLANAR_YCBCR);
+      v->mImage = new layers::GrallocImage();
     }
 #endif
     if (!v->mImage) {
       v->mImage = aContainer->CreatePlanarYCbCrImage();
     }
   } else {
     v->mImage = aImage;
   }
@@ -456,32 +456,19 @@ VideoData::Create(const VideoInfo& aInfo
   RefPtr<VideoData> v(new VideoData(aOffset,
                                     aTime,
                                     aDuration,
                                     aKeyframe,
                                     aTimecode,
                                     aInfo.mDisplay,
                                     0));
 
-  v->mImage = aContainer->CreateImage(ImageFormat::GRALLOC_PLANAR_YCBCR);
-  if (!v->mImage) {
-    return nullptr;
-  }
-  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
-               "Wrong format?");
-  typedef mozilla::layers::GrallocImage GrallocImage;
-  GrallocImage* videoImage = static_cast<GrallocImage*>(v->mImage.get());
-  GrallocImage::GrallocData data;
-
-  data.mPicSize = aPicture.Size();
-  data.mGraphicBuffer = aBuffer;
-
-  if (!videoImage->SetData(data)) {
-    return nullptr;
-  }
+  RefPtr<layers::GrallocImage> image = new layers::GrallocImage();
+  image->SetData(aBuffer, aPicture.Size());
+  v->mImage = image;
 
   return v.forget();
 }
 #endif // MOZ_OMX_DECODER
 
 // Alignment value - 1. 0 means that data isn't aligned.
 // For 32-bytes aligned, use 31U.
 #define RAW_DATA_ALIGNMENT 31U
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
@@ -733,19 +733,19 @@ MediaEngineGonkVideoSource::RotateImage(
   void *pMem = nullptr;
   // Bug 1109957 size will be wrong if width or height are odd
   uint32_t size = aWidth * aHeight * 3 / 2;
   MOZ_ASSERT(!(aWidth & 1) && !(aHeight & 1));
 
   graphicBuffer->lock(GraphicBuffer::USAGE_SW_READ_MASK, &pMem);
 
   uint8_t* srcPtr = static_cast<uint8_t*>(pMem);
+
   // Create a video frame and append it to the track.
-  ImageFormat format = ImageFormat::GONK_CAMERA_IMAGE;
-  RefPtr<layers::Image> image = mImageContainer->CreateImage(format);
+  RefPtr<layers::PlanarYCbCrImage> image = new GonkCameraImage();
 
   uint32_t dstWidth;
   uint32_t dstHeight;
 
   if (mRotation == 90 || mRotation == 270) {
     dstWidth = aHeight;
     dstHeight = aWidth;
   } else {
@@ -758,18 +758,17 @@ MediaEngineGonkVideoSource::RotateImage(
   MOZ_ASSERT(mTextureClientAllocator);
 
   RefPtr<layers::TextureClient> textureClient
     = mTextureClientAllocator->CreateOrRecycle(gfx::SurfaceFormat::YUV,
                                                gfx::IntSize(dstWidth, dstHeight),
                                                layers::BackendSelector::Content,
                                                layers::TextureFlags::DEFAULT,
                                                layers::ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
   if (textureClient) {
-    RefPtr<layers::GrallocTextureClientOGL> grallocTextureClient =
-      static_cast<layers::GrallocTextureClientOGL*>(textureClient.get());
+    RefPtr<layers::GrallocTextureClientOGL> grallocTextureClient = textureClient->AsGrallocTextureClientOGL();
     android::sp<android::GraphicBuffer> destBuffer = grallocTextureClient->GetGraphicBuffer();
 
     void* destMem = nullptr;
     destBuffer->lock(android::GraphicBuffer::USAGE_SW_WRITE_OFTEN, &destMem);
     uint8_t* dstPtr = static_cast<uint8_t*>(destMem);
 
     int32_t yStride = destBuffer->getStride();
@@ -782,21 +781,17 @@ MediaEngineGonkVideoSource::RotateImage(
                           dstPtr + (yStride * dstHeight), uvStride,
                           0, 0,
                           graphicBuffer->getStride(), aHeight,
                           aWidth, aHeight,
                           static_cast<libyuv::RotationMode>(mRotation),
                           libyuv::FOURCC_NV21);
 
     destBuffer->unlock();
-    layers::GrallocImage::GrallocData data;
-
-    data.mPicSize = gfx::IntSize(dstWidth, dstHeight);
-    data.mGraphicBuffer = textureClient;
-    image->AsGrallocImage()->SetData(data);
+    image->AsGrallocImage()->SetData(textureClient, gfx::IntSize(dstWidth, dstHeight));
   } else {
     // Handle out of gralloc case.
     image = mImageContainer->CreatePlanarYCbCrImage();
     uint8_t* dstPtr = image->AsPlanarYCbCrImage()->AllocateAndGetNewBuffer(size);
     libyuv::ConvertToI420(srcPtr, size,
                           dstPtr, dstWidth,
                           dstPtr + (dstWidth * dstHeight), half_width,
--- a/gfx/layers/GrallocImages.cpp
+++ b/gfx/layers/GrallocImages.cpp
@@ -142,21 +142,22 @@ GrallocImage::SetData(const Data& aData)
   // gralloc hal could map gralloc buffer only when the buffer is locked,
   // though some gralloc hals implementation maps it when it is allocated.
   mData.mYChannel = nullptr;
   mData.mCrChannel = nullptr;
   mData.mCbChannel = nullptr;
   return true;
 }
 
-bool GrallocImage::SetData(const GrallocData& aData)
+void
+GrallocImage::SetData(TextureClient* aGraphicBuffer, const gfx::IntSize& aSize)
 {
-  mTextureClient = static_cast<GrallocTextureClientOGL*>(aData.mGraphicBuffer.get());
-  mSize = aData.mPicSize;
-  return true;
+  MOZ_ASSERT(aGraphicBuffer->AsGrallocTextureClientOGL());
+  mTextureClient = aGraphicBuffer->AsGrallocTextureClientOGL();
+  mSize = aSize;
 }
 
 /**
  * Converts YVU420 semi planar frames to RGB565, possibly taking different
  * stride values.
  * Needed because the Android ColorConverter class assumes that the Y and UV
  * channels have equal stride.
  */
--- a/gfx/layers/GrallocImages.h
+++ b/gfx/layers/GrallocImages.h
@@ -48,36 +48,31 @@ GetDataSourceSurfaceFrom(android::sp<and
  * mPicX, mPicY and mPicSize. The size of the rendered image is
  * mPicSize, not mYSize or mCbCrSize.
  */
 class GrallocImage : public RecyclingPlanarYCbCrImage
 {
   typedef PlanarYCbCrData Data;
   static int32_t sColorIdMap[];
 public:
-  struct GrallocData {
-    RefPtr<TextureClient> mGraphicBuffer;
-    gfx::IntSize mPicSize;
-  };
-
   GrallocImage();
 
   virtual ~GrallocImage();
 
   /**
    * This makes a copy of the data buffers, in order to support functioning
    * in all different layer managers.
    */
   virtual bool SetData(const Data& aData);
 
   /**
    * Share the SurfaceDescriptor without making the copy, in order
    * to support functioning in all different layer managers.
    */
-  virtual bool SetData(const GrallocData& aData);
+  void SetData(TextureClient* aGraphicBuffer, const gfx::IntSize& aSize);
 
   // From [android 4.0.4]/hardware/msm7k/libgralloc-qsd8k/gralloc_priv.h
   enum {
     /* OEM specific HAL formats */
     HAL_PIXEL_FORMAT_YCbCr_422_P         = 0x102,
     HAL_PIXEL_FORMAT_YCbCr_420_P         = 0x103,
     HAL_PIXEL_FORMAT_YCbCr_420_SP        = 0x109,
     HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO = 0x10A,
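The net effect of the two GrallocImages hunks is that callers stop going through ImageContainer::CreateImage and the GrallocData struct. A minimal sketch of the before/after call pattern, assuming (as in the GonkCameraControl.cpp hunk above) that aBuffer is a layers::TextureClient* and picSize a gfx::IntSize supplied by the caller:

    // Before (removed): ask the container for a GRALLOC_PLANAR_YCBCR image,
    // downcast it, and fill in a GrallocImage::GrallocData struct.
    //   RefPtr<Image> frame =
    //     mImageContainer->CreateImage(ImageFormat::GRALLOC_PLANAR_YCBCR);
    //   GrallocImage* videoImage = static_cast<GrallocImage*>(frame.get());
    //   GrallocImage::GrallocData data;
    //   data.mGraphicBuffer = aBuffer;
    //   data.mPicSize = picSize;
    //   videoImage->SetData(data);

    // After: construct the GrallocImage directly and hand it the TextureClient.
    RefPtr<layers::GrallocImage> frame = new layers::GrallocImage();
    frame->SetData(aBuffer, picSize);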
--- a/gfx/layers/ImageContainer.cpp
+++ b/gfx/layers/ImageContainer.cpp
@@ -50,33 +50,16 @@ Atomic<int32_t> Image::sSerialCounter(0)
 Atomic<uint32_t> ImageContainer::sGenerationCounter(0);
 
 already_AddRefed<Image>
 ImageFactory::CreateImage(ImageFormat aFormat,
                           const gfx::IntSize &,
                           BufferRecycleBin *aRecycleBin)
 {
-  RefPtr<Image> img;
-#ifdef MOZ_WIDGET_GONK
-  if (aFormat == ImageFormat::GRALLOC_PLANAR_YCBCR) {
-    img = new GrallocImage();
-    return img.forget();
-  }
-  if (aFormat == ImageFormat::OVERLAY_IMAGE) {
-    img = new OverlayImage();
-    return img.forget();
-  }
-#endif
-#if defined(MOZ_WIDGET_GONK) && defined(MOZ_B2G_CAMERA) && defined(MOZ_WEBRTC)
-  if (aFormat == ImageFormat::GONK_CAMERA_IMAGE) {
-    img = new GonkCameraImage();
-    return img.forget();
-  }
-#endif
   return nullptr;
 }
 
 RefPtr<PlanarYCbCrImage>
 ImageFactory::CreatePlanarYCbCrImage(const gfx::IntSize& aScaleHint,
                                      BufferRecycleBin *aRecycleBin)
 {
   return new RecyclingPlanarYCbCrImage(aRecycleBin);
 }
@@ -178,29 +161,16 @@ ImageContainer::~ImageContainer()
     ImageBridgeChild::DispatchReleaseImageClient(mImageClient, mIPDLChild);
   }
 }
 
 already_AddRefed<Image>
 ImageContainer::CreateImage(ImageFormat aFormat)
 {
   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
-
-#ifdef MOZ_WIDGET_GONK
-  if (aFormat == ImageFormat::OVERLAY_IMAGE) {
-    if (mImageClient && mImageClient->GetTextureInfo().mCompositableType != CompositableType::IMAGE_OVERLAY) {
-      // If this ImageContainer is async but the image type mismatch, fix it here
-      if (ImageBridgeChild::IsCreated()) {
-        ImageBridgeChild::DispatchReleaseImageClient(mImageClient);
-        mImageClient = ImageBridgeChild::GetSingleton()->CreateImageClient(
-          CompositableType::IMAGE_OVERLAY, this).take();
-      }
-    }
-  }
-#endif
   if (mImageClient) {
     RefPtr<Image> img = mImageClient->CreateImage(aFormat);
     if (img) {
       return img.forget();
     }
   }
   return mImageFactory->CreateImage(aFormat, mScaleHint, mRecycleBin);
 }
@@ -220,16 +190,33 @@ ImageContainer::CreateSharedRGBImage()
 {
   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
   if (!mImageClient || !mImageClient->AsImageClientSingle()) {
    return nullptr;
   }
   return new SharedRGBImage(mImageClient);
 }
 
+#ifdef MOZ_WIDGET_GONK
+RefPtr<OverlayImage>
+ImageContainer::CreateOverlayImage()
+{
+  ReentrantMonitorAutoEnter mon(mReentrantMonitor);
+  if (mImageClient && mImageClient->GetTextureInfo().mCompositableType != CompositableType::IMAGE_OVERLAY) {
+    // If this ImageContainer is async but the image type mismatch, fix it here
+    if (ImageBridgeChild::IsCreated()) {
+      ImageBridgeChild::DispatchReleaseImageClient(mImageClient);
+      mImageClient = ImageBridgeChild::GetSingleton()->CreateImageClient(
+        CompositableType::IMAGE_OVERLAY, this).take();
+    }
+  }
+  return new OverlayImage();
+}
+#endif
+
 void
 ImageContainer::SetCurrentImageInternal(const nsTArray<NonOwningImage>& aImages)
 {
   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
 
   mGenerationCounter = ++sGenerationCounter;
 
   if (!aImages.IsEmpty()) {
--- a/gfx/layers/ImageContainer.h
+++ b/gfx/layers/ImageContainer.h
@@ -113,23 +113,24 @@ struct ImageBackendData
   virtual ~ImageBackendData() {}
 
 protected:
   ImageBackendData() {}
 };
 
 /* Forward declarations for Image derivatives. */
 class EGLImageImage;
+class SharedRGBImage;
 #ifdef MOZ_WIDGET_ANDROID
 class SurfaceTextureImage;
-#endif
-#ifdef XP_MACOSX
+#elif defined(XP_MACOSX)
 class MacIOSurfaceImage;
+#elif defined(MOZ_WIDGET_GONK)
+class OverlayImage;
 #endif
-class SharedRGBImage;
 
 /**
  * A class representing a buffer of pixel data. The data can be in one
 * of various formats including YCbCr.
 *
 * Create an image using an ImageContainer. Fill the image with data, and
 * then call ImageContainer::SetImage to display it. An image must not be
 * modified after calling SetImage. Image implementations do not need to
@@ -327,16 +328,20 @@ public:
    */
   B2G_ACL_EXPORT already_AddRefed<Image> CreateImage(ImageFormat aFormat);
 
   RefPtr<PlanarYCbCrImage> CreatePlanarYCbCrImage();
 
   // Factory methods for shared image types.
   RefPtr<SharedRGBImage> CreateSharedRGBImage();
 
+#ifdef MOZ_WIDGET_GONK
+  RefPtr<OverlayImage> CreateOverlayImage();
+#endif
+
   struct NonOwningImage {
     explicit NonOwningImage(Image* aImage = nullptr,
                             TimeStamp aTimeStamp = TimeStamp(),
                             FrameID aFrameID = 0,
                             ProducerID aProducerID = 0)
       : mImage(aImage), mTimeStamp(aTimeStamp), mFrameID(aFrameID),
         mProducerID(aProducerID) {}
     Image* mImage;
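Taken together with the DOMMediaStream.cpp hunk, the overlay path on Gonk now looks roughly like the sketch below; this is illustrative only and assumes MOZ_WIDGET_GONK plus the usual layers headers. The IMAGE_OVERLAY compositable fix-up that used to live in ImageContainer::CreateImage now happens inside CreateOverlayImage().

    RefPtr<ImageContainer> container =
      LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS_OVERLAY);
    // CreateOverlayImage() re-creates the async ImageClient if its compositable
    // type is not IMAGE_OVERLAY, then returns a fresh OverlayImage.
    RefPtr<layers::OverlayImage> overlay = container->CreateOverlayImage();

    nsAutoTArray<ImageContainer::NonOwningImage, 1> images;
    images.AppendElement(ImageContainer::NonOwningImage(overlay));
    container->SetCurrentImages(images);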
--- a/gfx/layers/client/ImageClient.cpp
+++ b/gfx/layers/client/ImageClient.cpp
@@ -315,26 +315,17 @@ ImageClientBridge::UpdateImage(ImageCont
   mAsyncContainerID = aContainer->GetAsyncContainerID();
   static_cast<ShadowLayerForwarder*>(GetForwarder())->AttachAsyncCompositable(mAsyncContainerID, mLayer);
   return true;
 }
 
 already_AddRefed<Image>
 ImageClientSingle::CreateImage(ImageFormat aFormat)
 {
-  RefPtr<Image> img;
-  switch (aFormat) {
-#ifdef MOZ_WIDGET_GONK
-    case ImageFormat::GRALLOC_PLANAR_YCBCR:
-      img = new GrallocImage();
-      return img.forget();
-#endif
-    default:
-      return nullptr;
-  }
+  return nullptr;
 }
 
 #ifdef MOZ_WIDGET_GONK
 ImageClientOverlay::ImageClientOverlay(CompositableForwarder* aFwd,
                                        TextureFlags aFlags)
   : ImageClient(aFwd, aFlags, CompositableType::IMAGE_OVERLAY)
 {
 }
@@ -361,25 +352,11 @@ ImageClientOverlay::UpdateImage(ImageCon
     OverlaySource source;
     source.handle() = OverlayHandle(overlayId);
     source.size() = size;
     GetForwarder()->UseOverlaySource(this, source, image->GetPictureRect());
   }
   return true;
 }
-
-already_AddRefed<Image>
-ImageClientOverlay::CreateImage(ImageFormat aFormat)
-{
-  RefPtr<Image> img;
-  switch (aFormat) {
-    case ImageFormat::OVERLAY_IMAGE:
-      img = new OverlayImage();
-      return img.forget();
-    default:
-      return nullptr;
-  }
-}
-
 #endif
 
 } // namespace layers
 } // namespace mozilla
--- a/gfx/layers/client/ImageClient.h
+++ b/gfx/layers/client/ImageClient.h
@@ -160,20 +160,23 @@ protected:
  */
 class ImageClientOverlay : public ImageClient
 {
 public:
   ImageClientOverlay(CompositableForwarder* aFwd, TextureFlags aFlags);
 
   virtual bool UpdateImage(ImageContainer* aContainer, uint32_t aContentFlags);
-  virtual already_AddRefed<Image> CreateImage(ImageFormat aFormat);
 
   TextureInfo GetTextureInfo() const override
   {
     return TextureInfo(CompositableType::IMAGE_OVERLAY);
   }
+
+  already_AddRefed<Image> CreateImage(ImageFormat aFormat) override {
+    return nullptr;
+  }
 };
 #endif
 
 } // namespace layers
 } // namespace mozilla
 
 #endif
--- a/gfx/layers/client/TextureClient.h
+++ b/gfx/layers/client/TextureClient.h
@@ -51,16 +51,17 @@ class PTextureChild;
 class TextureChild;
 class BufferTextureClient;
 class TextureClient;
 class TextureClientRecycleAllocator;
 #ifdef GFX_DEBUG_TRACK_CLIENTS_IN_POOL
 class TextureClientPool;
 #endif
 class KeepAlive;
+class GrallocTextureClientOGL;
 
 /**
  * TextureClient is the abstraction that allows us to share data between the
 * content and the compositor side.
 */
 
 enum TextureAllocationFlags {
   ALLOC_DEFAULT = 0,
@@ -229,16 +230,17 @@ public:
    */
   virtual bool AllocateForSurface(gfx::IntSize aSize,
                                   TextureAllocationFlags flags = ALLOC_DEFAULT)
   {
     return false;
   }
 
   virtual TextureClientYCbCr* AsTextureClientYCbCr() { return nullptr; }
+  virtual GrallocTextureClientOGL* AsGrallocTextureClientOGL() { return nullptr; }
 
   /**
   * Locks the shared data, allowing the caller to get access to it.
   *
   * Please always lock/unlock when accessing the shared data.
   * If Lock() returns false, you should not attempt to access the shared data.
   */
   virtual bool Lock(OpenMode aMode) { return IsValid(); }
--- a/gfx/layers/opengl/GrallocTextureClient.h
+++ b/gfx/layers/opengl/GrallocTextureClient.h
@@ -59,16 +59,20 @@ public:
   virtual bool IsAllocated() const override;
 
   virtual bool ToSurfaceDescriptor(SurfaceDescriptor& aOutDescriptor) override;
 
   virtual void SetRemoveFromCompositableWaiter(AsyncTransactionWaiter* aWaiter) override;
 
   virtual void WaitForBufferOwnership(bool aWaitReleaseFence = true) override;
 
+  GrallocTextureClientOGL* AsGrallocTextureClientOGL() override {
+    return this;
+  }
+
   void SetTextureFlags(TextureFlags aFlags) { AddFlags(aFlags); }
 
   gfx::IntSize GetSize() const override { return mSize; }
 
   android::sp<android::GraphicBuffer> GetGraphicBuffer()
   {
     return mGraphicBuffer;
   }
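The virtual AsGrallocTextureClientOGL() hook added above replaces the bare static_cast that callers such as MediaEngineGonkVideoSource::RotateImage used to perform; a null return means the TextureClient is not gralloc-backed. A rough sketch of the calling pattern, reusing the local names (textureClient, image, dstWidth, dstHeight) from the RotateImage hunks:

    if (layers::GrallocTextureClientOGL* gralloc =
          textureClient->AsGrallocTextureClientOGL()) {
      android::sp<android::GraphicBuffer> destBuffer = gralloc->GetGraphicBuffer();
      // ... lock destBuffer, rotate/convert into it with libyuv, unlock ...
      image->AsGrallocImage()->SetData(textureClient,
                                       gfx::IntSize(dstWidth, dstHeight));
    } else {
      // Not gralloc-backed: fall back to a plain PlanarYCbCrImage.
      image = mImageContainer->CreatePlanarYCbCrImage();
    }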
--- a/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcOMXH264VideoCodec.cpp
@@ -519,22 +519,19 @@ public:
   void OnNewFrame() override
   {
     RefPtr<layers::TextureClient> buffer = mNativeWindow->getCurrentBuffer();
     if (!buffer) {
       CODEC_LOGE("Decoder NewFrame: Get null buffer");
       return;
     }
 
-    layers::GrallocImage::GrallocData grallocData;
-    grallocData.mPicSize = buffer->GetSize();
-    grallocData.mGraphicBuffer = buffer;
-
+    gfx::IntSize picSize(buffer->GetSize());
     nsAutoPtr<layers::GrallocImage> grallocImage(new layers::GrallocImage());
-    grallocImage->SetData(grallocData);
+    grallocImage->SetData(buffer, picSize);
 
     // Get timestamp of the frame about to render.
     int64_t timestamp = -1;
     int64_t renderTimeMs = -1;
     {
       MutexAutoLock lock(mDecodedFrameLock);
       if (mDecodedFrames.empty()) {
         return;
@@ -542,22 +539,22 @@ public:
       EncodedFrame decoded = mDecodedFrames.front();
       timestamp = decoded.mTimestamp;
       renderTimeMs = decoded.mRenderTimeMs;
       mDecodedFrames.pop();
     }
 
     MOZ_ASSERT(timestamp >= 0 && renderTimeMs >= 0);
     CODEC_LOGD("Decoder NewFrame: %dx%d, timestamp %lld, renderTimeMs %lld",
-               grallocData.mPicSize.width, grallocData.mPicSize.height, timestamp, renderTimeMs);
+               picSize.width, picSize.height, timestamp, renderTimeMs);
 
     nsAutoPtr<webrtc::I420VideoFrame> videoFrame(
       new webrtc::TextureVideoFrame(new ImageNativeHandle(grallocImage.forget()),
-                                    grallocData.mPicSize.width,
-                                    grallocData.mPicSize.height,
+                                    picSize.width,
+                                    picSize.height,
                                     timestamp,
                                     renderTimeMs));
     if (videoFrame != nullptr) {
       mCallback->Decoded(*videoFrame);
     }
   }
 
 private:
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1476,19 +1476,17 @@ void MediaPipelineReceiveVideo::Pipeline
 #if defined(MOZILLA_XPCOMRT_API)
     if (buffer) {
       image_->SetImage(buffer, buffer_size, width_, height_);
     }
 #elif defined(MOZILLA_INTERNAL_API)
     if (buffer) {
       // Create a video frame using |buffer|.
 #ifdef MOZ_WIDGET_GONK
-      ImageFormat format = ImageFormat::GRALLOC_PLANAR_YCBCR;
-      RefPtr<Image> image = image_container_->CreateImage(format);
-      PlanarYCbCrImage* yuvImage = static_cast<PlanarYCbCrImage*>(image.get());
+      RefPtr<PlanarYCbCrImage> yuvImage = new GrallocImage();
 #else
       RefPtr<PlanarYCbCrImage> yuvImage = image_container_->CreatePlanarYCbCrImage();
 #endif
       uint8_t* frame = const_cast<uint8_t*>(static_cast<const uint8_t*>
         (buffer));
       PlanarYCbCrData yuvData;
       yuvData.mYChannel = frame;
       yuvData.mYSize = IntSize(width_, height_);