Bug 1193547 - Fall back to software decoding explicitly if the GPU doesn't support decoding the current resolution in hardware. r=cpearce,jya
author: Matt Woodrow <mwoodrow@mozilla.com>
date: Thu, 13 Aug 2015 14:00:47 -0400
changeset 257903 05c65951f21dd1ca7d7193dbbb22f7e164e7bff7
parent 257902 8501dbd5fbb1e9b497b015d95081434a6d4b428a
child 257904 fe6aedfbb45a24f9e445321f229629a3da4d2ecf
push id: 63755
push user: mwoodrow@mozilla.com
push date: Fri, 14 Aug 2015 21:53:16 +0000
reviewers: cpearce, jya
bugs: 1193547
milestone: 43.0a1
dom/media/platforms/wmf/DXVA2Manager.cpp
dom/media/platforms/wmf/DXVA2Manager.h
dom/media/platforms/wmf/MFTDecoder.h
dom/media/platforms/wmf/WMFDecoderModule.cpp
dom/media/platforms/wmf/WMFDecoderModule.h
dom/media/platforms/wmf/WMFVideoMFTManager.cpp
dom/media/platforms/wmf/WMFVideoMFTManager.h
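
The core of this change is a DXVA2 capability probe: rather than letting the MFT silently fall back to software, we ask the driver whether its H.264 decoder device can handle the stream's resolution. The sketch below is illustrative only and not part of the patch (the helper name CanHardwareDecodeH264At is hypothetical); it shows the essence of the probe that D3D9DXVA2Manager::SupportsConfig performs in the diff that follows, which additionally creates a decoder surface and an IDirectXVideoDecoder before declaring success.

// Illustrative sketch only -- not part of this changeset.
#include <d3d9.h>
#include <dxva2api.h>

static const GUID kDXVA2_ModeH264_E = {
  0x1b81be68, 0xa0c7, 0x11d3, { 0xb9, 0x84, 0x00, 0xc0, 0x4f, 0x2e, 0x73, 0xc5 }
};

static bool
CanHardwareDecodeH264At(IDirectXVideoDecoderService* aService, UINT aWidth, UINT aHeight)
{
  DXVA2_VideoDesc desc = {};
  desc.SampleWidth = aWidth;
  desc.SampleHeight = aHeight;
  desc.Format = (D3DFORMAT)MAKEFOURCC('N', 'V', '1', '2');

  UINT count = 0;
  DXVA2_ConfigPictureDecode* configs = nullptr;
  HRESULT hr = aService->GetDecoderConfigurations(kDXVA2_ModeH264_E, &desc,
                                                  nullptr, &count, &configs);
  if (FAILED(hr) || count == 0) {
    // The GPU can't decode this resolution in hardware; the caller should
    // explicitly fall back to software decoding instead of letting the MFT
    // do so silently.
    return false;
  }
  CoTaskMemFree(configs);
  return true;
}
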
--- a/dom/media/platforms/wmf/DXVA2Manager.cpp
+++ b/dom/media/platforms/wmf/DXVA2Manager.cpp
@@ -57,24 +57,130 @@ public:
 
   // Copies a region (aRegion) of the video frame stored in aVideoSample
   // into an image which is returned by aOutImage.
   HRESULT CopyToImage(IMFSample* aVideoSample,
                       const nsIntRect& aRegion,
                       ImageContainer* aContainer,
                       Image** aOutImage) override;
 
+  virtual bool SupportsConfig(IMFMediaType* aType) override;
+
 private:
   nsRefPtr<IDirect3D9Ex> mD3D9;
   nsRefPtr<IDirect3DDevice9Ex> mDevice;
   nsRefPtr<IDirect3DDeviceManager9> mDeviceManager;
   RefPtr<D3D9RecycleAllocator> mTextureClientAllocator;
+  nsRefPtr<IDirectXVideoDecoderService> mDecoderService;
   UINT32 mResetToken;
 };
 
+void GetDXVA2ExtendedFormatFromMFMediaType(IMFMediaType *pType,
+                                           DXVA2_ExtendedFormat *pFormat)
+{
+  // Get the interlace mode.
+  MFVideoInterlaceMode interlace =
+    (MFVideoInterlaceMode)MFGetAttributeUINT32(pType, MF_MT_INTERLACE_MODE, MFVideoInterlace_Unknown);
+
+  if (interlace == MFVideoInterlace_MixedInterlaceOrProgressive) {
+    pFormat->SampleFormat = DXVA2_SampleFieldInterleavedEvenFirst;
+  } else {
+    pFormat->SampleFormat = (UINT)interlace;
+  }
+
+  pFormat->VideoChromaSubsampling =
+    MFGetAttributeUINT32(pType, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
+  pFormat->NominalRange =
+    MFGetAttributeUINT32(pType, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);
+  pFormat->VideoTransferMatrix =
+    MFGetAttributeUINT32(pType, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown);
+  pFormat->VideoLighting =
+    MFGetAttributeUINT32(pType, MF_MT_VIDEO_LIGHTING, MFVideoLighting_Unknown);
+  pFormat->VideoPrimaries =
+    MFGetAttributeUINT32(pType, MF_MT_VIDEO_PRIMARIES, MFVideoPrimaries_Unknown);
+  pFormat->VideoTransferFunction =
+    MFGetAttributeUINT32(pType, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown);
+}
+
+HRESULT ConvertMFTypeToDXVAType(IMFMediaType *pType, DXVA2_VideoDesc *pDesc)
+{
+  ZeroMemory(pDesc, sizeof(*pDesc));
+
+  // The D3D format is the first DWORD of the subtype GUID.
+  GUID subtype = GUID_NULL;
+  HRESULT hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  pDesc->Format = (D3DFORMAT)subtype.Data1;
+
+  UINT32 width = 0;
+  UINT32 height = 0;
+  hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  pDesc->SampleWidth = width;
+  pDesc->SampleHeight = height;
+
+  UINT32 fpsNumerator = 0;
+  UINT32 fpsDenominator = 0;
+  hr = MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &fpsNumerator, &fpsDenominator);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  pDesc->InputSampleFreq.Numerator = fpsNumerator;
+  pDesc->InputSampleFreq.Denominator = fpsDenominator;
+
+  GetDXVA2ExtendedFormatFromMFMediaType(pType, &pDesc->SampleFormat);
+  pDesc->OutputFrameFreq = pDesc->InputSampleFreq;
+  if ((pDesc->SampleFormat.SampleFormat == DXVA2_SampleFieldInterleavedEvenFirst) ||
+      (pDesc->SampleFormat.SampleFormat == DXVA2_SampleFieldInterleavedOddFirst)) {
+    pDesc->OutputFrameFreq.Numerator *= 2;
+  }
+
+  return S_OK;
+}
+
+static const GUID DXVA2_ModeH264_E = {
+  0x1b81be68, 0xa0c7, 0x11d3, { 0xb9, 0x84, 0x00, 0xc0, 0x4f, 0x2e, 0x73, 0xc5 }
+};
+
+// This tests whether a DXVA video decoder can be created for the given media type/resolution.
+// It uses the same decoder device (DXVA2_ModeH264_E, aka DXVA2_ModeH264_VLD_NoFGT) as the H.264
+// decoder MFT provided by Windows (CLSID_CMSH264DecoderMFT), so we can use it to determine
+// whether the MFT would fall back to software decoding for this configuration.
+bool
+D3D9DXVA2Manager::SupportsConfig(IMFMediaType* aType)
+{
+  DXVA2_VideoDesc desc;
+  HRESULT hr = ConvertMFTypeToDXVAType(aType, &desc);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+  UINT configCount;
+  DXVA2_ConfigPictureDecode* configs = nullptr;
+  hr = mDecoderService->GetDecoderConfigurations(DXVA2_ModeH264_E, &desc, nullptr, &configCount, &configs);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
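+  // CreateVideoDecoder requires at least one render-target surface, so create a
+  // dummy NV12 decoder surface at the stream's resolution for the probe below.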
+  nsRefPtr<IDirect3DSurface9> surface;
+  hr = mDecoderService->CreateSurface(desc.SampleWidth, desc.SampleHeight, 0, (D3DFORMAT)MAKEFOURCC('N', 'V', '1', '2'),
+                                      D3DPOOL_DEFAULT, 0, DXVA2_VideoDecoderRenderTarget,
+                                      surface.StartAssignment(), NULL);
+  if (!SUCCEEDED(hr)) {
+    CoTaskMemFree(configs);
+    return false;
+  }
+
+  for (UINT i = 0; i < configCount; i++) {
+    nsRefPtr<IDirectXVideoDecoder> decoder;
+    IDirect3DSurface9* surfaces = surface;
+    hr = mDecoderService->CreateVideoDecoder(DXVA2_ModeH264_E, &desc, &configs[i], &surfaces, 1, decoder.StartAssignment());
+    if (SUCCEEDED(hr) && decoder) {
+      CoTaskMemFree(configs);
+      return true;
+    }
+  }
+  CoTaskMemFree(configs);
+  return false;
+}
+
 D3D9DXVA2Manager::D3D9DXVA2Manager()
   : mResetToken(0)
 {
   MOZ_COUNT_CTOR(D3D9DXVA2Manager);
   MOZ_ASSERT(NS_IsMainThread());
 }
 
 D3D9DXVA2Manager::~D3D9DXVA2Manager()
@@ -175,16 +281,45 @@ D3D9DXVA2Manager::Init(nsACString& aFail
     return hr;
   }
   hr = deviceManager->ResetDevice(device, resetToken);
   if (!SUCCEEDED(hr)) {
     aFailureReason = nsPrintfCString("IDirect3DDeviceManager9::ResetDevice failed with error %X", hr);
     return hr;
   }
 
+  HANDLE deviceHandle;
+  nsRefPtr<IDirectXVideoDecoderService> decoderService;
+  hr = deviceManager->OpenDeviceHandle(&deviceHandle);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = deviceManager->GetVideoService(deviceHandle, IID_PPV_ARGS(decoderService.StartAssignment()));
+  deviceManager->CloseDeviceHandle(deviceHandle);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
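+  // Only report DXVA support if the driver exposes the H.264 VLD (no film grain)
+  // decoder device; SupportsConfig relies on it below.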
+  UINT deviceCount;
+  GUID* decoderDevices = nullptr;
+  hr = decoderService->GetDecoderDeviceGuids(&deviceCount, &decoderDevices);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  bool found = false;
+  for (UINT i = 0; i < deviceCount; i++) {
+    if (decoderDevices[i] == DXVA2_ModeH264_E) {
+      found = true;
+      break;
+    }
+  }
+  CoTaskMemFree(decoderDevices);
+
+  if (!found) {
+    return E_FAIL;
+  }
+
+  mDecoderService = decoderService;
+
   mResetToken = resetToken;
   mD3D9 = d3d9Ex;
   mDevice = device;
   mDeviceManager = deviceManager;
 
   mTextureClientAllocator = new D3D9RecycleAllocator(layers::ImageBridgeChild::GetSingleton(),
                                                      mDevice);
   mTextureClientAllocator->SetMaxPoolSize(5);
--- a/dom/media/platforms/wmf/DXVA2Manager.h
+++ b/dom/media/platforms/wmf/DXVA2Manager.h
@@ -39,16 +39,18 @@ public:
                               layers::Image** aOutImage) = 0;
 
   virtual HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight) { return S_OK; }
 
   virtual bool IsD3D11() { return false; }
 
   virtual ~DXVA2Manager();
 
+  virtual bool SupportsConfig(IMFMediaType* aType) { return true; }
+
 protected:
   Mutex mLock;
   DXVA2Manager();
 };
 
 } // namespace mozilla
 
 #endif // DXVA2Manager_h_
--- a/dom/media/platforms/wmf/MFTDecoder.h
+++ b/dom/media/platforms/wmf/MFTDecoder.h
@@ -54,16 +54,21 @@ public:
   // Returns:
   //  - MF_E_NOTACCEPTING if the decoder can't accept input. The data
   //    must be resubmitted after Output() stops producing output.
   HRESULT Input(const uint8_t* aData,
                 uint32_t aDataSize,
                 int64_t aTimestampUsecs);
   HRESULT Input(IMFSample* aSample);
 
+  HRESULT CreateInputSample(const uint8_t* aData,
+                            uint32_t aDataSize,
+                            int64_t aTimestampUsecs,
+                            RefPtr<IMFSample>* aOutSample);
+
   // Retrieves output from the MFT. Call this once Input() returns
   // MF_E_NOTACCEPTING. Some MFTs with hardware acceleration (the H.264
   // decoder MFT in particular) can't handle it if clients hold onto
   // references to the output IMFSample, so don't do that.
   //
   // Returns:
   //  - MF_E_TRANSFORM_STREAM_CHANGE if the underlying stream output
   //    type changed. Retrieve the output media type and reconfig client,
@@ -75,24 +80,20 @@ public:
 
   // Sends a flush message to the MFT. This causes it to discard all
   // input data. Use before seeking.
   HRESULT Flush();
 
   // Sends a message to the MFT.
   HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
 
-private:
 
   HRESULT SetDecoderOutputType(ConfigureOutputCallback aCallback, void* aData);
+private:
 
-  HRESULT CreateInputSample(const uint8_t* aData,
-                            uint32_t aDataSize,
-                            int64_t aTimestampUsecs,
-                            RefPtr<IMFSample>* aOutSample);
 
   HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
 
   MFT_INPUT_STREAM_INFO mInputStreamInfo;
   MFT_OUTPUT_STREAM_INFO mOutputStreamInfo;
 
   RefPtr<IMFTransform> mDecoder;
 
--- a/dom/media/platforms/wmf/WMFDecoderModule.cpp
+++ b/dom/media/platforms/wmf/WMFDecoderModule.cpp
@@ -97,17 +97,17 @@ WMFDecoderModule::CreateVideoDecoder(con
                                      layers::ImageContainer* aImageContainer,
                                      FlushableTaskQueue* aVideoTaskQueue,
                                      MediaDataDecoderCallback* aCallback)
 {
   nsAutoPtr<WMFVideoMFTManager> manager(
     new WMFVideoMFTManager(aConfig,
                            aLayersBackend,
                            aImageContainer,
-                           sDXVAEnabled && ShouldUseDXVA(aConfig)));
+                           sDXVAEnabled));
 
   nsRefPtr<MFTDecoder> mft = manager->Init();
 
   if (!mft) {
     return nullptr;
   }
 
   nsRefPtr<MediaDataDecoder> decoder =
@@ -129,42 +129,21 @@ WMFDecoderModule::CreateAudioDecoder(con
   }
 
   nsRefPtr<MediaDataDecoder> decoder =
     new WMFMediaDataDecoder(manager.forget(), mft, aAudioTaskQueue, aCallback);
   return decoder.forget();
 }
 
 bool
-WMFDecoderModule::ShouldUseDXVA(const VideoInfo& aConfig) const
-{
-  static bool isAMD = false;
-  static bool initialized = false;
-  if (!initialized) {
-    nsCOMPtr<nsIGfxInfo> gfxInfo = services::GetGfxInfo();
-    nsAutoString vendor;
-    gfxInfo->GetAdapterVendorID(vendor);
-    isAMD = vendor.Equals(widget::GfxDriverInfo::GetDeviceVendor(widget::VendorAMD), nsCaseInsensitiveStringComparator()) ||
-            vendor.Equals(widget::GfxDriverInfo::GetDeviceVendor(widget::VendorATI), nsCaseInsensitiveStringComparator());
-    initialized = true;
-  }
-  if (!isAMD) {
-    return true;
-  }
-  // Don't use DXVA for 4k videos or above, since it seems to perform poorly.
-  return aConfig.mDisplay.width <= 1920 && aConfig.mDisplay.height <= 1200;
-}
-
-bool
 WMFDecoderModule::SupportsSharedDecoders(const VideoInfo& aConfig) const
 {
   // If DXVA is enabled, but we're not going to use it for this specific config, then
   // we can't use the shared decoder.
-  return !AgnosticMimeType(aConfig.mMimeType) &&
-    (!sDXVAEnabled || ShouldUseDXVA(aConfig));
+  return !AgnosticMimeType(aConfig.mMimeType);
 }
 
 bool
 WMFDecoderModule::SupportsMimeType(const nsACString& aMimeType)
 {
   return aMimeType.EqualsLiteral("video/mp4") ||
          aMimeType.EqualsLiteral("video/avc") ||
          aMimeType.EqualsLiteral("audio/mp4a-latm") ||
--- a/dom/media/platforms/wmf/WMFDecoderModule.h
+++ b/dom/media/platforms/wmf/WMFDecoderModule.h
@@ -47,15 +47,14 @@ public:
   static bool HasH264();
 
   // Called on main thread.
   static void Init();
 
   // Called from any thread, must call init first
   static int GetNumDecoderThreads();
 private:
-  bool ShouldUseDXVA(const VideoInfo& aConfig) const;
   bool mWMFInitialized;
 };
 
 } // namespace mozilla
 
 #endif
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -209,106 +209,174 @@ WMFVideoMFTManager::InitInternal(bool aF
   RefPtr<MFTDecoder> decoder(new MFTDecoder());
 
   HRESULT hr = decoder->Create(GetMFTGUID());
   NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
 
   RefPtr<IMFAttributes> attr(decoder->GetAttributes());
   UINT32 aware = 0;
   if (attr) {
-      attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
-      attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
-                      WMFDecoderModule::GetNumDecoderThreads());
-      hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
-      if (SUCCEEDED(hr)) {
-        LOG("Enabling Low Latency Mode");
-      } else {
-        LOG("Couldn't enable Low Latency Mode");
-      }
+    attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
+    attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
+      WMFDecoderModule::GetNumDecoderThreads());
+    hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
+    if (SUCCEEDED(hr)) {
+      LOG("Enabling Low Latency Mode");
+    }
+    else {
+      LOG("Couldn't enable Low Latency Mode");
+    }
   }
 
   if (useDxva) {
     if (aware) {
       // TODO: Test if I need this anywhere... Maybe on Vista?
       //hr = attr->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
       //NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
       MOZ_ASSERT(mDXVA2Manager);
       ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
       hr = decoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager);
       if (SUCCEEDED(hr)) {
         mUseHwAccel = true;
       } else {
+        mDXVA2Manager = nullptr;
         mDXVAFailureReason = nsPrintfCString("MFT_MESSAGE_SET_D3D_MANAGER failed with code %X", hr);
       }
-    } else {
+    }
+    else {
       mDXVAFailureReason.AssignLiteral("Decoder returned false for MF_SA_D3D_AWARE");
     }
   }
 
-  // Setup the input/output media types.
-  RefPtr<IMFMediaType> inputType;
-  hr = wmf::MFCreateMediaType(byRef(inputType));
-  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
-
-  hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
-
-  hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
-  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
-
-  hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
+  mDecoder = decoder;
+  hr = SetDecoderMediaTypes();
   NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
 
-  RefPtr<IMFMediaType> outputType;
-  hr = wmf::MFCreateMediaType(byRef(outputType));
-  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
-
-  hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
-
-  GUID outputSubType = mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
-  hr = outputType->SetGUID(MF_MT_SUBTYPE, outputSubType);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
-
-  hr = decoder->SetMediaTypes(inputType, outputType);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
-
-  mDecoder = decoder;
   LOG("Video Decoder initialized, Using DXVA: %s", (mUseHwAccel ? "Yes" : "No"));
 
   // Just in case ConfigureVideoFrameGeometry() does not set these
   mVideoInfo = VideoInfo();
   mVideoStride = 0;
   mVideoWidth = 0;
   mVideoHeight = 0;
   mPictureRegion.SetEmpty();
 
   return decoder.forget();
 }
 
 HRESULT
+WMFVideoMFTManager::SetDecoderMediaTypes()
+{
+  // Setup the input/output media types.
+  RefPtr<IMFMediaType> inputType;
+  HRESULT hr = wmf::MFCreateMediaType(byRef(inputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<IMFMediaType> outputType;
+  hr = wmf::MFCreateMediaType(byRef(outputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  GUID outputSubType = mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
+  hr = outputType->SetGUID(MF_MT_SUBTYPE, outputSubType);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return mDecoder->SetMediaTypes(inputType, outputType);
+}
+
+HRESULT
 WMFVideoMFTManager::Input(MediaRawData* aSample)
 {
   if (!mDecoder) {
     // This can happen during shutdown.
     return E_FAIL;
   }
+
+  HRESULT hr = mDecoder->CreateInputSample(aSample->Data(),
+                                           uint32_t(aSample->Size()),
+                                           aSample->mTime,
+                                           &mLastInput);
+  NS_ENSURE_TRUE(SUCCEEDED(hr) && mLastInput != nullptr, hr);
+
   // Forward sample data to the decoder.
-  return mDecoder->Input(aSample->Data(),
-                         uint32_t(aSample->Size()),
-                         aSample->mTime);
+  return mDecoder->Input(mLastInput);
+}
+
+// The MFTransform we use for decoding h264 video will silently fall
+// back to software decoding (even if we've negotiated DXVA) if the GPU
+// doesn't support decoding the given resolution. It will then upload
+// the software decoded frames into d3d textures to preserve behaviour.
+//
+// Unfortunately this seems to cause corruption (see bug 1193547) and is
+// slow because the upload is done into a non-shareable texture and requires
+// us to copy it.
+//
+// This code tests if the given resolution can be supported directly on the GPU,
+// and makes sure we only ask the MFT for DXVA if it can be supported properly.
+bool
+WMFVideoMFTManager::MaybeToggleDXVA(IMFMediaType* aType)
+{
+  // SupportsConfig only checks for valid h264 decoders currently.
+  if (!mDXVA2Manager || mStreamType != H264) {
+    return false;
+  }
+
+  if (mDXVA2Manager->SupportsConfig(aType)) {
+    if (!mUseHwAccel) {
+      // DXVA disabled, but supported for this resolution
+      ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
+      HRESULT hr = mDecoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager);
+      if (SUCCEEDED(hr)) {
+        mUseHwAccel = true;
+        return true;
+      }
+    }
+  } else if (mUseHwAccel) {
+    // DXVA enabled, and not supported for this resolution
+    HRESULT hr = mDecoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, 0);
+    MOZ_ASSERT(SUCCEEDED(hr), "Attempting to fall back to software failed?");
+    mUseHwAccel = false;
+    return true;
+  }
+
+  return false;
 }
 
 HRESULT
 WMFVideoMFTManager::ConfigureVideoFrameGeometry()
 {
   RefPtr<IMFMediaType> mediaType;
   HRESULT hr = mDecoder->GetOutputMediaType(mediaType);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
+  // If we enabled/disabled DXVA in response to a resolution
+  // change then we need to renegotiate our media types,
+  // and resubmit our previous frame (since the MFT appears
+  // to lose it otherwise).
+  if (MaybeToggleDXVA(mediaType)) {
+    hr = SetDecoderMediaTypes();
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = mDecoder->GetOutputMediaType(mediaType);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    mDecoder->Input(mLastInput);
+  }
+
   // Verify that the video subtype is what we expect it to be.
   // When using hardware acceleration/DXVA2 the video format should
   // be NV12, which is DXVA2's preferred format. For software decoding
   // we use YV12, as that's easier for us to stick into our rendering
   // pipeline than NV12. NV12 has interleaved UV samples, whereas YV12
   // is a planar format.
   GUID videoFormat;
   hr = mediaType->GetGUID(MF_MT_SUBTYPE, &videoFormat);
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.h
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.h
@@ -51,27 +51,33 @@ private:
   HRESULT CreateBasicVideoFrame(IMFSample* aSample,
                                 int64_t aStreamOffset,
                                 VideoData** aOutVideoData);
 
   HRESULT CreateD3DVideoFrame(IMFSample* aSample,
                               int64_t aStreamOffset,
                               VideoData** aOutVideoData);
 
+  HRESULT SetDecoderMediaTypes();
+
+  bool MaybeToggleDXVA(IMFMediaType* aType);
+
   // Video frame geometry.
   VideoInfo mVideoInfo;
   uint32_t mVideoStride;
   uint32_t mVideoWidth;
   uint32_t mVideoHeight;
   nsIntRect mPictureRegion;
 
   RefPtr<MFTDecoder> mDecoder;
   RefPtr<layers::ImageContainer> mImageContainer;
   nsAutoPtr<DXVA2Manager> mDXVA2Manager;
 
+  RefPtr<IMFSample> mLastInput;
+
   const bool mDXVAEnabled;
   const layers::LayersBackend mLayersBackend;
   bool mUseHwAccel;
 
   nsCString mDXVAFailureReason;
 
   enum StreamType {
     Unknown,