Backed out changeset 6f22cc93383c (bug 1397141) for bustage at dom/media/ipc/VideoDecoderParent.cpp:87 (undeclared variable). r=backout on a CLOSED TREE
--- a/dom/media/platforms/wmf/WMFDecoderModule.cpp
+++ b/dom/media/platforms/wmf/WMFDecoderModule.cpp
@@ -105,21 +105,17 @@ WMFDecoderModule::CreateVideoDecoder(con
}
nsAutoPtr<WMFVideoMFTManager> manager(
new WMFVideoMFTManager(aParams.VideoConfig(),
aParams.mKnowsCompositor,
aParams.mImageContainer,
sDXVAEnabled));
- MediaResult result = manager->Init();
- if (NS_FAILED(result)) {
- if (aParams.mError) {
- *aParams.mError = result;
- }
+ if (!manager->Init()) {
return nullptr;
}
RefPtr<MediaDataDecoder> decoder =
new WMFMediaDataDecoder(manager.forget(), aParams.mTaskQueue);
return decoder.forget();
}
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -518,36 +518,34 @@ WMFVideoMFTManager::InitializeDXVA()
mozilla::SyncRunnable::DispatchToThread(
SystemGroup::EventTargetFor(mozilla::TaskCategory::Other), event);
}
mDXVA2Manager = event->mDXVA2Manager;
return mDXVA2Manager != nullptr;
}
-MediaResult
+bool
WMFVideoMFTManager::ValidateVideoInfo()
{
  // The WMF H.264 decoder is documented to have a minimum resolution of
  // 48x48 pixels. We've observed the decoder working for output smaller than
// that, but on some output it hangs in IMFTransform::ProcessOutput(), so
// we just reject streams which are less than the documented minimum.
// https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx
static const int32_t MIN_H264_FRAME_DIMENSION = 48;
if (mStreamType == H264 &&
(mVideoInfo.mImage.width < MIN_H264_FRAME_DIMENSION ||
mVideoInfo.mImage.height < MIN_H264_FRAME_DIMENSION)) {
+ LogToBrowserConsole(NS_LITERAL_STRING(
+ "Can't decode H.264 stream with width or height less than 48 pixels."));
mIsValid = false;
- return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
- RESULT_DETAIL("Can't decode H.264 stream with width or "
- "height less than 48 pixels."));
}
- return MediaResult(NS_OK);
-
+ return mIsValid;
}
already_AddRefed<MFTDecoder>
WMFVideoMFTManager::LoadAMDVP9Decoder()
{
MOZ_ASSERT(mStreamType == VP9);
RefPtr<MFTDecoder> decoder = new MFTDecoder();
@@ -566,49 +564,48 @@ WMFVideoMFTManager::LoadAMDVP9Decoder()
if (!decoderDLL) {
return nullptr;
}
hr = decoder->Create(decoderDLL, CLSID_AMDWebmMfVp9Dec);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
return decoder.forget();
}
-MediaResult
+bool
WMFVideoMFTManager::Init()
{
- MediaResult result = ValidateVideoInfo();
- if (NS_FAILED(result)) {
- return result;
+ if (!ValidateVideoInfo()) {
+ return false;
}
- result = InitInternal();
- if (NS_FAILED(result) && mAMDVP9InUse) {
+ bool success = InitInternal();
+ if (!success && mAMDVP9InUse) {
// Something failed with the AMD VP9 decoder; attempt again defaulting back
// to Microsoft MFT.
mCheckForAMDDecoder = false;
if (mDXVA2Manager) {
DeleteOnMainThread(mDXVA2Manager);
}
- result = InitInternal();
+ success = InitInternal();
}
- if (NS_SUCCEEDED(result) && mDXVA2Manager) {
+ if (success && mDXVA2Manager) {
// If we had some failures but eventually made it work,
// make sure we preserve the messages.
if (mDXVA2Manager->IsD3D11()) {
mDXVAFailureReason.Append(NS_LITERAL_CSTRING("Using D3D11 API"));
} else {
mDXVAFailureReason.Append(NS_LITERAL_CSTRING("Using D3D9 API"));
}
}
- return result;
+ return success;
}
-MediaResult
+bool
WMFVideoMFTManager::InitInternal()
{
  // The H264 SanityTest uses a 132x132 video to determine if DXVA can be used,
  // so we want to use the software decoder for videos with lower resolutions.
static const int MIN_H264_HW_WIDTH = 132;
static const int MIN_H264_HW_HEIGHT = 132;
mUseHwAccel = false; // default value; changed if D3D setup succeeds.
@@ -626,19 +623,17 @@ WMFVideoMFTManager::InitInternal()
mAMDVP9InUse = true;
}
}
if (!decoder) {
mCheckForAMDDecoder = false;
mAMDVP9InUse = false;
decoder = new MFTDecoder();
hr = decoder->Create(GetMFTGUID());
- NS_ENSURE_TRUE(SUCCEEDED(hr),
- MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
- RESULT_DETAIL("Can't create the MFT decoder.")));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
}
RefPtr<IMFAttributes> attr(decoder->GetAttributes());
UINT32 aware = 0;
if (attr) {
attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
WMFDecoderModule::GetNumDecoderThreads());
@@ -670,54 +665,46 @@ WMFVideoMFTManager::InitInternal()
}
else {
mDXVAFailureReason.AssignLiteral(
"Decoder returned false for MF_SA_D3D_AWARE");
}
}
if (!mUseHwAccel) {
+ // Use VP8/9 MFT only if HW acceleration is available
if (mStreamType == VP9 || mStreamType == VP8) {
- return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
- RESULT_DETAIL("Use VP8/9 MFT only if HW acceleration "
- "is available."));
+ return false;
}
Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED,
uint32_t(media::MediaDecoderBackend::WMFSoftware));
}
mDecoder = decoder;
hr = SetDecoderMediaTypes();
- NS_ENSURE_TRUE(SUCCEEDED(hr),
- MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
- RESULT_DETAIL("Fail to set the decoder media types.")));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
RefPtr<IMFMediaType> outputType;
hr = mDecoder->GetOutputMediaType(outputType);
- NS_ENSURE_TRUE(SUCCEEDED(hr),
- MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
- RESULT_DETAIL("Fail to get the output media type.")));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
if (mUseHwAccel && !CanUseDXVA(outputType)) {
mDXVAEnabled = false;
// DXVA initialization with current decoder actually failed,
// re-do initialization.
return InitInternal();
}
LOG("Video Decoder initialized, Using DXVA: %s",
(mUseHwAccel ? "Yes" : "No"));
if (mDXVA2Manager) {
hr = mDXVA2Manager->ConfigureForSize(mVideoInfo.ImageRect().width,
mVideoInfo.ImageRect().height);
- NS_ENSURE_TRUE(SUCCEEDED(hr),
- MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
- RESULT_DETAIL("Fail to configure image size for "
- "DXVA2Manager.")));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), false);
} else {
GetDefaultStride(outputType, mVideoInfo.ImageRect().width, &mVideoStride);
}
LOG("WMFVideoMFTManager frame geometry stride=%u picture=(%d, %d, %d, %d) "
"display=(%d,%d)",
mVideoStride,
mVideoInfo.ImageRect().x,
mVideoInfo.ImageRect().y,
@@ -737,17 +724,17 @@ WMFVideoMFTManager::InitInternal()
HRESULT hr =
device->QueryInterface((ID3D10Multithread**)getter_AddRefs(multi));
if (SUCCEEDED(hr) && multi) {
multi->SetMultithreadProtected(TRUE);
mIMFUsable = true;
}
}
}
- return MediaResult(NS_OK);
+ return true;
}
HRESULT
WMFVideoMFTManager::SetDecoderMediaTypes()
{
// Setup the input/output media types.
RefPtr<IMFMediaType> inputType;
HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.h
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.h
@@ -3,17 +3,16 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(WMFVideoMFTManager_h_)
#define WMFVideoMFTManager_h_
#include "MFTDecoder.h"
-#include "MediaResult.h"
#include "WMF.h"
#include "WMFMediaDataDecoder.h"
#include "mozilla/Atomics.h"
#include "mozilla/RefPtr.h"
#include "nsAutoPtr.h"
#include "mozilla/gfx/Rect.h"
namespace mozilla {
@@ -24,17 +23,17 @@ class WMFVideoMFTManager : public MFTMan
{
public:
WMFVideoMFTManager(const VideoInfo& aConfig,
layers::KnowsCompositor* aKnowsCompositor,
layers::ImageContainer* aImageContainer,
bool aDXVAEnabled);
~WMFVideoMFTManager();
- MediaResult Init();
+ bool Init();
HRESULT Input(MediaRawData* aSample) override;
HRESULT Output(int64_t aStreamOffset, RefPtr<MediaData>& aOutput) override;
void Shutdown() override;
bool IsHardwareAccelerated(nsACString& aFailureReason) const override;
@@ -59,21 +58,21 @@ public:
MediaDataDecoder::ConversionRequired NeedsConversion() const override
{
return mStreamType == H264
? MediaDataDecoder::ConversionRequired::kNeedAnnexB
: MediaDataDecoder::ConversionRequired::kNeedNone;
}
private:
- MediaResult ValidateVideoInfo();
+ bool ValidateVideoInfo();
bool InitializeDXVA();
- MediaResult InitInternal();
+ bool InitInternal();
HRESULT CreateBasicVideoFrame(IMFSample* aSample,
int64_t aStreamOffset,
VideoData** aOutVideoData);
HRESULT CreateD3DVideoFrame(IMFSample* aSample,
int64_t aStreamOffset,
VideoData** aOutVideoData);