Merge mozilla-central to autoland. a=merge CLOSED TREE
author Tiberius Oros <toros@mozilla.com>
Mon, 08 Oct 2018 19:21:26 +0300
changeset 495721 1d605da1f66d71f3419c9c3abe9d4eb79a6c2b2d
parent 495720 88544bf70da7924bf86c0f90d6e4d21a5c72fa11 (current diff)
parent 495703 c291143e24019097d087f9307e59b49facaf90cb (diff)
child 495722 741a43f63ec42206b3bf86a33d30b1cd9d449aa3
push id 9984
push user ffxbld-merge
push date Mon, 15 Oct 2018 21:07:35 +0000
treeherder mozilla-beta@183d27ea8570
reviewers merge
milestone 64.0a1
--- a/devtools/client/webconsole/test/mochitest/browser_jsterm_screenshot_command_clipboard.js
+++ b/devtools/client/webconsole/test/mochitest/browser_jsterm_screenshot_command_clipboard.js
@@ -171,21 +171,18 @@ async function getImageSizeFromClipboard
   ok(data.value, "screenshot exists");
   ok(dataLength.value > 0, "screenshot has length");
 
   let image = data.value;
   let dataURI = `data:${flavor};base64,`;
 
   // Due to the differences in how images could be stored in the clipboard the
   // checks below are needed. The clipboard could already provide the image as
-  // byte streams, but also as pointer, or as image container. If it's not
-  // possible obtain a byte stream, the function returns `null`.
-  if (image instanceof Ci.nsISupportsInterfacePointer) {
-    image = image.data;
-  }
+  // byte streams or as an image container. If it's not possible to obtain a
+  // byte stream, the function throws.
 
   if (image instanceof Ci.imgIContainer) {
     image = Cc["@mozilla.org/image/tools;1"]
               .getService(Ci.imgITools)
               .encodeImage(image, flavor);
   }
 
   if (image instanceof Ci.nsIInputStream) {
--- a/devtools/shared/screenshot/save.js
+++ b/devtools/shared/screenshot/save.js
@@ -178,25 +178,23 @@ async function save(args, image) {
 function saveToClipboard(base64URI) {
   try {
     const imageTools = Cc["@mozilla.org/image/tools;1"]
                        .getService(Ci.imgITools);
 
     const base64Data = base64URI.replace("data:image/png;base64,", "");
 
     const image = atob(base64Data);
-    const imgPtr = Cc["@mozilla.org/supports-interface-pointer;1"]
-                   .createInstance(Ci.nsISupportsInterfacePointer);
-    imgPtr.data = imageTools.decodeImageFromBuffer(image, image.length, "image/png");
+    const img = imageTools.decodeImageFromBuffer(image, image.length, "image/png");
 
     const transferable = Cc["@mozilla.org/widget/transferable;1"]
                      .createInstance(Ci.nsITransferable);
     transferable.init(null);
     transferable.addDataFlavor("image/png");
-    transferable.setTransferData("image/png", imgPtr, -1);
+    transferable.setTransferData("image/png", img, -1);
 
     Services.clipboard.setData(transferable, null, Services.clipboard.kGlobalClipboard);
     return L10N.getStr("screenshotCopied");
   } catch (ex) {
     console.error(ex);
     return L10N.getStr("screenshotErrorCopying");
   }
 }
--- a/dom/base/nsContentUtils.cpp
+++ b/dom/base/nsContentUtils.cpp
@@ -7748,24 +7748,17 @@ nsContentUtils::IPCTransferableToTransfe
       NS_ENSURE_SUCCESS(rv, rv);
     } else if (item.data().type() == IPCDataTransferData::TShmem) {
       if (nsContentUtils::IsFlavorImage(item.flavor())) {
         nsCOMPtr<imgIContainer> imageContainer;
         rv = nsContentUtils::DataTransferItemToImage(item,
                                                      getter_AddRefs(imageContainer));
         NS_ENSURE_SUCCESS(rv, rv);
 
-        nsCOMPtr<nsISupportsInterfacePointer> imgPtr =
-          do_CreateInstance(NS_SUPPORTS_INTERFACE_POINTER_CONTRACTID);
-        NS_ENSURE_TRUE(imgPtr, NS_ERROR_FAILURE);
-
-        rv = imgPtr->SetData(imageContainer);
-        NS_ENSURE_SUCCESS(rv, rv);
-
-        aTransferable->SetTransferData(item.flavor().get(), imgPtr, sizeof(nsISupports*));
+        aTransferable->SetTransferData(item.flavor().get(), imageContainer, sizeof(nsISupports*));
       } else {
         nsCOMPtr<nsISupportsCString> dataWrapper =
           do_CreateInstance(NS_SUPPORTS_CSTRING_CONTRACTID, &rv);
         NS_ENSURE_SUCCESS(rv, rv);
 
         // The buffer contains the terminating null.
         Shmem itemData = item.data().get_Shmem();
         const nsDependentCSubstring text(itemData.get<char>(),
@@ -8010,22 +8003,16 @@ nsContentUtils::TransferableToIPCTransfe
 
           Shmem dataAsShmem = ConvertToShmem(aChild, aParent, dataAsString);
           if (!dataAsShmem.IsReadable() || !dataAsShmem.Size<char>()) {
             continue;
           }
 
           item->data() = dataAsShmem;
         } else {
-          nsCOMPtr<nsISupportsInterfacePointer> sip =
-            do_QueryInterface(data);
-          if (sip) {
-            sip->GetData(getter_AddRefs(data));
-          }
-
           // Images to be pasted on the clipboard are nsIInputStreams
           nsCOMPtr<nsIInputStream> stream(do_QueryInterface(data));
           if (stream) {
             IPCDataTransferItem* item = aIPCDataTransfer->items().AppendElement();
             item->flavor() = flavorStr;
 
             nsCString imageData;
             NS_ConsumeStream(stream, UINT32_MAX, imageData);
--- a/dom/base/nsCopySupport.cpp
+++ b/dom/base/nsCopySupport.cpp
@@ -416,25 +416,18 @@ nsCopySupport::ImageCopy(nsIImageLoading
                                           getter_AddRefs(imgRequest));
     NS_ENSURE_TRUE(image, NS_ERROR_FAILURE);
 
 #ifdef XP_WIN
     rv = AppendImagePromise(trans, imgRequest, aImageElement);
     NS_ENSURE_SUCCESS(rv, rv);
 #endif
 
-    nsCOMPtr<nsISupportsInterfacePointer>
-      imgPtr(do_CreateInstance(NS_SUPPORTS_INTERFACE_POINTER_CONTRACTID, &rv));
-    NS_ENSURE_SUCCESS(rv, rv);
-
-    rv = imgPtr->SetData(image);
-    NS_ENSURE_SUCCESS(rv, rv);
-
     // copy the image data onto the transferable
-    rv = trans->SetTransferData(kNativeImageMime, imgPtr,
+    rv = trans->SetTransferData(kNativeImageMime, image,
                                 sizeof(nsISupports*));
     NS_ENSURE_SUCCESS(rv, rv);
   }
 
   // get clipboard
   nsCOMPtr<nsIClipboard> clipboard(do_GetService(kCClipboardCID, &rv));
   NS_ENSURE_SUCCESS(rv, rv);
 
--- a/dom/events/DataTransfer.cpp
+++ b/dom/events/DataTransfer.cpp
@@ -1156,29 +1156,19 @@ DataTransfer::ConvertFromVariant(nsIVari
     }
 
     nsCOMPtr<nsIFlavorDataProvider> fdp = do_QueryInterface(data);
     if (fdp) {
       // for flavour data providers, use kFlavorHasDataProvider (which has the
       // value 0) as the length.
       fdp.forget(aSupports);
       *aLength = nsITransferable::kFlavorHasDataProvider;
-    }
-    else {
-      // wrap the item in an nsISupportsInterfacePointer
-      nsCOMPtr<nsISupportsInterfacePointer> ptrSupports =
-        do_CreateInstance(NS_SUPPORTS_INTERFACE_POINTER_CONTRACTID);
-      if (!ptrSupports) {
-        return false;
-      }
-
-      ptrSupports->SetData(data);
-      ptrSupports.forget(aSupports);
-
-      *aLength = sizeof(nsISupportsInterfacePointer *);
+    } else {
+      data.forget(aSupports);
+      *aLength = sizeof(nsISupports *);
     }
 
     return true;
   }
 
   nsAutoString str;
   nsresult rv = aVariant->GetAsAString(str);
   if (NS_FAILED(rv)) {
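
The diffs above (nsContentUtils.cpp, nsCopySupport.cpp, DataTransfer.cpp) all drop the nsISupportsInterfacePointer wrapper and hand the underlying object to the transferable directly. A minimal sketch of the resulting pattern (the helper name and structure are illustrative, not from the patch; error handling trimmed):

    // Sketch only: post-patch way to put an image on a transferable.
    static nsresult
    PutImageOnTransferable(imgIContainer* aImage, nsITransferable** aResult)
    {
      nsCOMPtr<nsITransferable> trans =
        do_CreateInstance("@mozilla.org/widget/transferable;1");
      NS_ENSURE_TRUE(trans, NS_ERROR_FAILURE);
      trans->Init(nullptr);
      trans->AddDataFlavor(kNativeImageMime);
      // The imgIContainer is stored directly; consumers QueryInterface it back
      // out instead of unwrapping an nsISupportsInterfacePointer first.
      trans->SetTransferData(kNativeImageMime, aImage, sizeof(nsISupports*));
      trans.forget(aResult);
      return NS_OK;
    }
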
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -318,24 +318,28 @@ VideoData::CreateAndCopyData(const Video
                                     aInfo.mDisplay,
                                     0));
 
   // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
   // format.
 #if XP_WIN
  // We disable this code path on Windows versions earlier than Windows 8 due to
   // intermittent crashes with old drivers. See bug 1405110.
-  if (IsWin8OrLater() && !XRE_IsParentProcess() &&
-      aAllocator && aAllocator->SupportsD3D11()) {
+  // D3D11YCbCrImage can only handle YCbCr images using 3 non-interleaved planes;
+  // a non-zero mSkip value indicates that one of the planes would be interleaved.
+  if (IsWin8OrLater() && !XRE_IsParentProcess() && aAllocator &&
+      aAllocator->SupportsD3D11() && aBuffer.mPlanes[0].mSkip == 0 &&
+      aBuffer.mPlanes[1].mSkip == 0 && aBuffer.mPlanes[2].mSkip == 0) {
     RefPtr<layers::D3D11YCbCrImage> d3d11Image = new layers::D3D11YCbCrImage();
     PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
     if (d3d11Image->SetData(layers::ImageBridgeChild::GetSingleton()
-                            ? layers::ImageBridgeChild::GetSingleton().get()
-                            : aAllocator,
-                            aContainer, data)) {
+                              ? layers::ImageBridgeChild::GetSingleton().get()
+                              : aAllocator,
+                            aContainer,
+                            data)) {
       v->mImage = d3d11Image;
       return v.forget();
     }
   }
 #endif
   if (!v->mImage) {
     v->mImage = aContainer->CreatePlanarYCbCrImage();
   }
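
For reference, mSkip describes how many interleaved samples sit between consecutive samples of a plane, which is why the D3D11 path above requires mSkip == 0 on all three planes. An illustrative sketch (the struct and helper are hypothetical, not from the tree) of how a non-zero mSkip changes addressing:

    #include <cstdint>

    struct Plane {
      uint8_t* mData;
      int32_t mStride;
      uint32_t mSkip;  // 0 = packed; 1 = every other sample belongs to another plane
    };

    static uint8_t
    SampleAt(const Plane& aPlane, uint32_t aRow, uint32_t aCol)
    {
      // With mSkip == 1 (e.g. the chroma planes of a semi-planar UVUV layout)
      // each logical column is two samples apart in memory.
      return aPlane.mData[aRow * aPlane.mStride + aCol * (1 + aPlane.mSkip)];
    }
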
--- a/dom/media/platforms/wmf/DXVA2Manager.cpp
+++ b/dom/media/platforms/wmf/DXVA2Manager.cpp
@@ -615,20 +615,23 @@ public:
   IUnknown* GetDXVADeviceManager() override;
 
   // Copies a region (aRegion) of the video frame stored in aVideoSample
   // into an image which is returned by aOutImage.
   HRESULT CopyToImage(IMFSample* aVideoSample,
                       const gfx::IntRect& aRegion,
                       Image** aOutImage) override;
 
-  virtual HRESULT CopyToBGRATexture(ID3D11Texture2D *aInTexture,
-                                    ID3D11Texture2D** aOutTexture);
+  HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture,
+                            const GUID& aSubType,
+                            ID3D11Texture2D** aOutTexture) override;
 
-  HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight) override;
+  HRESULT ConfigureForSize(IMFMediaType* aInputType,
+                           uint32_t aWidth,
+                           uint32_t aHeight) override;
 
   bool IsD3D11() override { return true; }
 
   bool SupportsConfig(IMFMediaType* aType, float aFramerate) override;
 
 private:
   HRESULT CreateFormatConverter();
 
@@ -647,17 +650,18 @@ private:
   RefPtr<MFTDecoder> mTransform;
   RefPtr<D3D11RecycleAllocator> mTextureClientAllocator;
   RefPtr<ID3D11VideoDecoder> mDecoder;
   RefPtr<layers::SyncObjectClient> mSyncObject;
   GUID mDecoderGUID;
   uint32_t mWidth = 0;
   uint32_t mHeight = 0;
   UINT mDeviceManagerToken = 0;
-  bool mConfiguredForSize = false;
+  RefPtr<IMFMediaType> mInputType;
+  GUID mInputSubType;
 };
 
 bool
 D3D11DXVA2Manager::SupportsConfig(IMFMediaType* aType, float aFramerate)
 {
   MOZ_ASSERT(NS_IsMainThread());
   D3D11_VIDEO_DECODER_DESC desc;
   desc.Guid = mDecoderGUID;
@@ -914,62 +918,68 @@ HRESULT
 D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
                                const gfx::IntRect& aRegion,
                                Image** aOutImage)
 {
   NS_ENSURE_TRUE(aVideoSample, E_POINTER);
   NS_ENSURE_TRUE(aOutImage, E_POINTER);
   MOZ_ASSERT(mTextureClientAllocator);
 
-  RefPtr<D3D11ShareHandleImage> image =
-    new D3D11ShareHandleImage(gfx::IntSize(mWidth, mHeight), aRegion);
+  RefPtr<D3D11ShareHandleImage> image = new D3D11ShareHandleImage(
+    gfx::IntSize(mWidth, mHeight), aRegion, mInputSubType);
+
+  // Retrieve the DXGI_FORMAT for the current video sample.
+  RefPtr<IMFMediaBuffer> buffer;
+  HRESULT hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<IMFDXGIBuffer> dxgiBuf;
+  hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<ID3D11Texture2D> tex;
+  hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(tex));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  D3D11_TEXTURE2D_DESC inDesc;
+  tex->GetDesc(&inDesc);
+
   bool ok = image->AllocateTexture(mTextureClientAllocator, mDevice);
   NS_ENSURE_TRUE(ok, E_FAIL);
 
-  RefPtr<TextureClient> client = image->GetTextureClient(ImageBridgeChild::GetSingleton().get());
+  RefPtr<TextureClient> client =
+    image->GetTextureClient(ImageBridgeChild::GetSingleton().get());
   NS_ENSURE_TRUE(client, E_FAIL);
 
+  RefPtr<ID3D11Texture2D> texture = image->GetTexture();
+  D3D11_TEXTURE2D_DESC outDesc;
+  texture->GetDesc(&outDesc);
+
   RefPtr<IDXGIKeyedMutex> mutex;
-  HRESULT hr = S_OK;
-  RefPtr<ID3D11Texture2D> texture = image->GetTexture();
-
   texture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
 
   {
     AutoTextureLock(mutex, hr, 2000);
     if (mutex && (FAILED(hr) || hr == WAIT_TIMEOUT || hr == WAIT_ABANDONED)) {
       return hr;
     }
 
     if (!mutex && mDevice != DeviceManagerDx::Get()->GetCompositorDevice()) {
       NS_ENSURE_TRUE(mSyncObject, E_FAIL);
     }
 
-    if (client && client->GetFormat() == SurfaceFormat::NV12) {
+    if (outDesc.Format == inDesc.Format) {
       // Our video frame is stored in a non-sharable ID3D11Texture2D. We need
       // to create a copy of that frame as a sharable resource, save its share
       // handle, and put that handle into the rendering pipeline.
 
-      RefPtr<IMFMediaBuffer> buffer;
-      hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer));
-      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-      RefPtr<IMFDXGIBuffer> dxgiBuf;
-      hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf));
-      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-      RefPtr<ID3D11Texture2D> tex;
-      hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(tex));
-      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
       UINT index;
       dxgiBuf->GetSubresourceIndex(&index);
       mContext->CopySubresourceRegion(texture, 0, 0, 0, 0, tex, index, nullptr);
     } else {
-      // Our video sample is in NV12 format but our output texture is in BGRA.
       // Use MFT to do color conversion.
       hr = E_FAIL;
       mozilla::mscom::EnsureMTA(
         [&]() -> void { hr = mTransform->Input(aVideoSample); });
       NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
       RefPtr<IMFSample> sample;
       hr = CreateOutputSample(sample, texture);
@@ -992,31 +1002,52 @@ D3D11DXVA2Manager::CopyToImage(IMFSample
   }
 
   image.forget(aOutImage);
 
   return S_OK;
 }
 
 HRESULT
-D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D *aInTexture,
+D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D* aInTexture,
+                                     const GUID& aSubType,
                                      ID3D11Texture2D** aOutTexture)
 {
   NS_ENSURE_TRUE(aInTexture, E_POINTER);
   NS_ENSURE_TRUE(aOutTexture, E_POINTER);
 
   HRESULT hr;
   RefPtr<ID3D11Texture2D> texture, inTexture;
 
   inTexture = aInTexture;
 
   CD3D11_TEXTURE2D_DESC desc;
   aInTexture->GetDesc(&desc);
-  hr = ConfigureForSize(desc.Width, desc.Height);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  if (!mInputType || desc.Width != mWidth || desc.Height != mHeight) {
+    RefPtr<IMFMediaType> inputType;
+    hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = inputType->SetGUID(MF_MT_SUBTYPE, aSubType);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr =
+      inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = ConfigureForSize(inputType, desc.Width, desc.Height);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  }
 
   RefPtr<IDXGIKeyedMutex> mutex;
   inTexture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
   // The rest of this function will not work if inTexture implements
  // IDXGIKeyedMutex! In that case we would have to copy to a
  // texture that does not use a mutex.
 
   if (mutex) {
@@ -1068,92 +1099,82 @@ D3D11DXVA2Manager::CopyToBGRATexture(ID3
     [&]() -> void { hr = mTransform->Output(&outputSample); });
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   texture.forget(aOutTexture);
 
   return S_OK;
 }
 
-HRESULT ConfigureOutput(IMFMediaType* aOutput, void* aData)
+HRESULT
+D3D11DXVA2Manager::ConfigureForSize(IMFMediaType* aInputType,
+                                    uint32_t aWidth,
+                                    uint32_t aHeight)
 {
-  HRESULT hr =
-    aOutput->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+  GUID subType = { 0 };
+  HRESULT hr = aInputType->GetGUID(MF_MT_SUBTYPE, &subType);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  gfx::IntSize* size = reinterpret_cast<gfx::IntSize*>(aData);
-  hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, size->width, size->height);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  return S_OK;
-}
-
-HRESULT
-D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
-{
-  if (mConfiguredForSize && aWidth == mWidth && aHeight == mHeight) {
-    // If the size hasn't changed, don't reconfigure.
+  if (subType == mInputSubType && aWidth == mWidth && aHeight == mHeight) {
+    // If the media type hasn't changed, don't reconfigure.
     return S_OK;
   }
 
-  mWidth = aWidth;
-  mHeight = aHeight;
-
+  // Create a copy of our input type.
   RefPtr<IMFMediaType> inputType;
-  HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+  hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  hr = aInputType->CopyAllItems(inputType);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  hr = inputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
-  hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+  hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   RefPtr<IMFAttributes> attr;
   mozilla::mscom::EnsureMTA(
     [&]() -> void { attr = mTransform->GetAttributes(); });
   NS_ENSURE_TRUE(attr != nullptr, E_FAIL);
 
   hr = attr->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   hr = attr->SetUINT32(MF_LOW_LATENCY, FALSE);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-
   RefPtr<IMFMediaType> outputType;
   hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   hr = outputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  gfx::IntSize size(mWidth, mHeight);
   hr = E_FAIL;
   mozilla::mscom::EnsureMTA([&]() -> void {
-    hr =
-      mTransform->SetMediaTypes(inputType, outputType, ConfigureOutput, &size);
+    hr = mTransform->SetMediaTypes(
+      inputType, outputType, [aWidth, aHeight](IMFMediaType* aOutput) {
+        HRESULT hr = aOutput->SetUINT32(MF_MT_INTERLACE_MODE,
+                                        MFVideoInterlace_Progressive);
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+        hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+        hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, aWidth, aHeight);
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+        return S_OK;
+      });
   });
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  mConfiguredForSize = true;
+  mWidth = aWidth;
+  mHeight = aHeight;
+  mInputType = inputType;
+  mInputSubType = subType;
 
   return S_OK;
 }
 
 bool
 D3D11DXVA2Manager::CanCreateDecoder(const D3D11_VIDEO_DECODER_DESC& aDesc,
                                     const float aFramerate) const
 {
--- a/dom/media/platforms/wmf/DXVA2Manager.h
+++ b/dom/media/platforms/wmf/DXVA2Manager.h
@@ -41,25 +41,28 @@ public:
   // IMFDXGIDeviceManager. It is safe to call this on any thread.
   virtual IUnknown* GetDXVADeviceManager() = 0;
 
   // Creates an Image for the video frame stored in aVideoSample.
   virtual HRESULT CopyToImage(IMFSample* aVideoSample,
                               const gfx::IntRect& aRegion,
                               layers::Image** aOutImage) = 0;
 
-  virtual HRESULT CopyToBGRATexture(ID3D11Texture2D *aInTexture,
+  virtual HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture,
+                                    const GUID& aSubType,
                                     ID3D11Texture2D** aOutTexture)
   {
     // Not implemented!
     MOZ_CRASH("CopyToBGRATexture not implemented on this manager.");
     return E_FAIL;
   }
 
-  virtual HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
+  virtual HRESULT ConfigureForSize(IMFMediaType* aInputType,
+                                   uint32_t aWidth,
+                                   uint32_t aHeight)
   {
     return S_OK;
   }
 
   virtual bool IsD3D11() { return false; }
 
   virtual ~DXVA2Manager();
 
--- a/dom/media/platforms/wmf/MFTDecoder.cpp
+++ b/dom/media/platforms/wmf/MFTDecoder.cpp
@@ -80,27 +80,31 @@ MFTDecoder::Create(HMODULE aDecoderDLL, 
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   return S_OK;
 }
 
 HRESULT
 MFTDecoder::SetMediaTypes(IMFMediaType* aInputType,
                           IMFMediaType* aOutputType,
-                          ConfigureOutputCallback aCallback,
-                          void* aData)
+                          std::function<HRESULT(IMFMediaType*)>&& aCallback)
 {
   MOZ_ASSERT(mscom::IsCurrentThreadMTA());
-  mOutputType = aOutputType;
 
   // Set the input type to the one the caller gave us...
   HRESULT hr = mDecoder->SetInputType(0, aInputType, 0);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-  hr = SetDecoderOutputType(true /* match all attributes */, aCallback, aData);
+  GUID currentSubtype = {0};
+  hr = aOutputType->GetGUID(MF_MT_SUBTYPE, &currentSubtype);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = SetDecoderOutputType(currentSubtype,
+                            aOutputType,
+                            std::move(aCallback));
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   hr = mDecoder->GetInputStreamInfo(0, &mInputStreamInfo);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
   hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
   NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
@@ -116,58 +120,70 @@ MFTDecoder::GetAttributes()
   MOZ_ASSERT(mscom::IsCurrentThreadMTA());
   RefPtr<IMFAttributes> attr;
   HRESULT hr = mDecoder->GetAttributes(getter_AddRefs(attr));
   NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
   return attr.forget();
 }
 
 HRESULT
-MFTDecoder::SetDecoderOutputType(bool aMatchAllAttributes,
-                                 ConfigureOutputCallback aCallback,
-                                 void* aData)
+MFTDecoder::FindDecoderOutputType()
+{
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mOutputType, "SetMediaTypes must have been called once");
+
+  return FindDecoderOutputTypeWithSubtype(mOutputSubType);
+}
+
+HRESULT
+MFTDecoder::FindDecoderOutputTypeWithSubtype(const GUID& aSubType)
+{
+  return SetDecoderOutputType(
+    aSubType, nullptr, [](IMFMediaType*) { return S_OK; });
+}
+
+HRESULT
+MFTDecoder::SetDecoderOutputType(
+  const GUID& aSubType,
+  IMFMediaType* aTypeToUse,
+  std::function<HRESULT(IMFMediaType*)>&& aCallback)
 {
   MOZ_ASSERT(mscom::IsCurrentThreadMTA());
   NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
 
-  GUID currentSubtype = {0};
-  HRESULT hr = mOutputType->GetGUID(MF_MT_SUBTYPE, &currentSubtype);
-  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  if (!aTypeToUse) {
+    aTypeToUse = mOutputType;
+  }
 
  // Iterate over the enumerated output types, until we find one compatible
   // with what we need.
   RefPtr<IMFMediaType> outputType;
   UINT32 typeIndex = 0;
   while (SUCCEEDED(mDecoder->GetOutputAvailableType(
     0, typeIndex++, getter_AddRefs(outputType)))) {
     GUID outSubtype = {0};
-    hr = outputType->GetGUID(MF_MT_SUBTYPE, &outSubtype);
+    HRESULT hr = outputType->GetGUID(MF_MT_SUBTYPE, &outSubtype);
     NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-    BOOL resultMatch = currentSubtype == outSubtype;
-
-    if (resultMatch && aMatchAllAttributes) {
-      hr = mOutputType->Compare(outputType, MF_ATTRIBUTES_MATCH_OUR_ITEMS,
-                                &resultMatch);
+    if (aSubType == outSubtype) {
+      hr = aCallback(outputType);
       NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-    }
-    if (resultMatch == TRUE) {
-      if (aCallback) {
-        hr = aCallback(outputType, aData);
-        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
-      }
+
       hr = mDecoder->SetOutputType(0, outputType, 0);
       NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
       hr = mDecoder->GetOutputStreamInfo(0, &mOutputStreamInfo);
       NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
       mMFTProvidesOutputSamples = IsFlagSet(mOutputStreamInfo.dwFlags,
                                             MFT_OUTPUT_STREAM_PROVIDES_SAMPLES);
 
+      mOutputType = outputType;
+      mOutputSubType = outSubtype;
+
       return S_OK;
     }
     outputType = nullptr;
   }
   return E_FAIL;
 }
 
 HRESULT
--- a/dom/media/platforms/wmf/MFTDecoder.h
+++ b/dom/media/platforms/wmf/MFTDecoder.h
@@ -33,29 +33,28 @@ public:
 
   // Sets the input and output media types. Call after Init().
   //
   // Params:
   //  - aInputType needs at least major and minor types set.
   //  - aOutputType needs at least major and minor types set.
   //    This is used to select the matching output type out
   //    of all the available output types of the MFT.
-  typedef HRESULT (*ConfigureOutputCallback)(IMFMediaType* aOutputType,
-                                             void* aData);
   HRESULT SetMediaTypes(IMFMediaType* aInputType,
                         IMFMediaType* aOutputType,
-                        ConfigureOutputCallback aCallback = nullptr,
-                        void* aData = nullptr);
+                        std::function<HRESULT(IMFMediaType*)>&& aCallback =
+                          [](IMFMediaType* aOutput) { return S_OK; });
 
   // Returns the MFT's IMFAttributes object.
   already_AddRefed<IMFAttributes> GetAttributes();
 
   // Retrieves the media type being output. This may not be valid until
   //  the first sample is decoded.
   HRESULT GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType);
+  const GUID& GetOutputMediaSubType() const { return mOutputSubType; }
 
   // Submits data into the MFT for processing.
   //
   // Returns:
   //  - MF_E_NOTACCEPTING if the decoder can't accept input. The data
   //    must be resubmitted after Output() stops producing output.
   HRESULT Input(const uint8_t* aData,
                 uint32_t aDataSize,
@@ -83,28 +82,34 @@ public:
 
   // Sends a flush message to the MFT. This causes it to discard all
   // input data. Use before seeking.
   HRESULT Flush();
 
   // Sends a message to the MFT.
   HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
 
-  HRESULT SetDecoderOutputType(bool aMatchAllAttributes,
-                               ConfigureOutputCallback aCallback,
-                               void* aData);
+  HRESULT FindDecoderOutputTypeWithSubtype(const GUID& aSubType);
+  HRESULT FindDecoderOutputType();
 private:
+  // Will search for a suitable MediaType using aTypeToUse if set; if not, it
+  // will use the current mOutputType.
+  HRESULT SetDecoderOutputType(
+    const GUID& aSubType,
+    IMFMediaType* aTypeToUse,
+    std::function<HRESULT(IMFMediaType*)>&& aCallback);
   HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
 
   MFT_INPUT_STREAM_INFO mInputStreamInfo;
   MFT_OUTPUT_STREAM_INFO mOutputStreamInfo;
 
   RefPtr<IMFTransform> mDecoder;
 
   RefPtr<IMFMediaType> mOutputType;
+  GUID mOutputSubType;
 
   // True if the IMFTransform allocates the samples that it returns.
   bool mMFTProvidesOutputSamples = false;
 
   // True if we need to mark the next sample as a discontinuity.
   bool mDiscontinuity = true;
 };
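
The signature change above replaces the old C-style ConfigureOutputCallback/void* pair with a std::function, so call sites can pass a capturing lambda (as DXVA2Manager.cpp now does). A minimal usage sketch, assuming an already-initialised MFTDecoder and media types; the wrapper itself is illustrative:

    static HRESULT
    ConfigureDecoderTypes(MFTDecoder* aDecoder,
                          IMFMediaType* aInput,
                          IMFMediaType* aOutput,
                          uint32_t aWidth,
                          uint32_t aHeight)
    {
      // The lambda is invoked on the first available output type whose subtype
      // matches, and may adjust it before it is applied to the MFT.
      return aDecoder->SetMediaTypes(
        aInput, aOutput, [aWidth, aHeight](IMFMediaType* aCandidate) -> HRESULT {
          return MFSetAttributeSize(aCandidate, MF_MT_FRAME_SIZE, aWidth, aHeight);
        });
    }
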
 
--- a/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
@@ -232,19 +232,18 @@ WMFAudioMFTManager::Output(int64_t aStre
   HRESULT hr;
   int typeChangeCount = 0;
   while (true) {
     hr = mDecoder->Output(&sample);
     if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
       return hr;
     }
     if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
-      hr = mDecoder->SetDecoderOutputType(true /* check all attribute */,
-                                          nullptr,
-                                          nullptr);
+      hr = mDecoder->FindDecoderOutputType();
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
       hr = UpdateOutputType();
       NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
       // Catch infinite loops, but some decoders perform at least 2 stream
       // changes on consecutive calls, so be permissive.
       // 100 is arbitrarily > 2.
       NS_ENSURE_TRUE(typeChangeCount < 100, MF_E_TRANSFORM_STREAM_CHANGE);
       ++typeChangeCount;
       continue;
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -681,18 +681,18 @@ WMFVideoMFTManager::InitInternal()
     // re-do initialization.
     return InitInternal();
   }
 
   LOG("Video Decoder initialized, Using DXVA: %s",
       (mUseHwAccel ? "Yes" : "No"));
 
   if (mUseHwAccel) {
-    hr = mDXVA2Manager->ConfigureForSize(mVideoInfo.ImageRect().width,
-                                         mVideoInfo.ImageRect().height);
+    hr = mDXVA2Manager->ConfigureForSize(
+      outputType, mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height);
     NS_ENSURE_TRUE(SUCCEEDED(hr),
                    MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                RESULT_DETAIL("Fail to configure image size for "
                                              "DXVA2Manager.")));
   } else {
     mYUVColorSpace = GetYUVColorSpace(outputType);
     GetDefaultStride(outputType, mVideoInfo.ImageRect().width, &mVideoStride);
   }
@@ -895,18 +895,28 @@ WMFVideoMFTManager::CreateBasicVideoFram
     hr = twoDBuffer->Lock2D(&data, &stride);
     NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
   } else {
     hr = buffer->Lock(&data, nullptr, nullptr);
     NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
     stride = mVideoStride;
   }
 
-  // YV12, planar format: [YYYY....][VVVV....][UUUU....]
+  const GUID& subType = mDecoder->GetOutputMediaSubType();
+  MOZ_DIAGNOSTIC_ASSERT(subType == MFVideoFormat_YV12 ||
+                        subType == MFVideoFormat_P010 ||
+                        subType == MFVideoFormat_P016);
+  const gfx::ColorDepth colorDepth = subType == MFVideoFormat_YV12
+                                       ? gfx::ColorDepth::COLOR_8
+                                       : gfx::ColorDepth::COLOR_16;
+
+  // YV12, planar format (3 planes): [YYYY....][VVVV....][UUUU....]
   // i.e., Y, then V, then U.
+  // P010, P016 planar format (2 planes) [YYYY....][UVUV...]
+  // See https://docs.microsoft.com/en-us/windows/desktop/medfound/10-bit-and-16-bit-yuv-video-formats
   VideoData::YCbCrBuffer b;
 
   uint32_t videoWidth = mImageSize.width;
   uint32_t videoHeight = mImageSize.height;
 
   // Y (Y') plane
   b.mPlanes[0].mData = data;
   b.mPlanes[0].mStride = stride;
@@ -918,43 +928,63 @@ WMFVideoMFTManager::CreateBasicVideoFram
   MOZ_DIAGNOSTIC_ASSERT(mDecodedImageSize.height % 16 == 0,
                         "decoded height must be 16 bytes aligned");
   uint32_t y_size = stride * mDecodedImageSize.height;
   uint32_t v_size = stride * mDecodedImageSize.height / 4;
   uint32_t halfStride = (stride + 1) / 2;
   uint32_t halfHeight = (videoHeight + 1) / 2;
   uint32_t halfWidth = (videoWidth + 1) / 2;
 
-  // U plane (Cb)
-  b.mPlanes[1].mData = data + y_size + v_size;
-  b.mPlanes[1].mStride = halfStride;
-  b.mPlanes[1].mHeight = halfHeight;
-  b.mPlanes[1].mWidth = halfWidth;
-  b.mPlanes[1].mOffset = 0;
-  b.mPlanes[1].mSkip = 0;
+  if (subType == MFVideoFormat_YV12) {
+    // U plane (Cb)
+    b.mPlanes[1].mData = data + y_size + v_size;
+    b.mPlanes[1].mStride = halfStride;
+    b.mPlanes[1].mHeight = halfHeight;
+    b.mPlanes[1].mWidth = halfWidth;
+    b.mPlanes[1].mOffset = 0;
+    b.mPlanes[1].mSkip = 0;
 
-  // V plane (Cr)
-  b.mPlanes[2].mData = data + y_size;
-  b.mPlanes[2].mStride = halfStride;
-  b.mPlanes[2].mHeight = halfHeight;
-  b.mPlanes[2].mWidth = halfWidth;
-  b.mPlanes[2].mOffset = 0;
-  b.mPlanes[2].mSkip = 0;
+    // V plane (Cr)
+    b.mPlanes[2].mData = data + y_size;
+    b.mPlanes[2].mStride = halfStride;
+    b.mPlanes[2].mHeight = halfHeight;
+    b.mPlanes[2].mWidth = halfWidth;
+    b.mPlanes[2].mOffset = 0;
+    b.mPlanes[2].mSkip = 0;
+  } else {
+    // U plane (Cb)
+    b.mPlanes[1].mData = data + y_size;
+    b.mPlanes[1].mStride = stride;
+    b.mPlanes[1].mHeight = halfHeight;
+    b.mPlanes[1].mWidth = halfWidth;
+    b.mPlanes[1].mOffset = 0;
+    b.mPlanes[1].mSkip = 1;
+
+    // V plane (Cr)
+    b.mPlanes[2].mData = data + y_size + sizeof(short);
+    b.mPlanes[2].mStride = stride;
+    b.mPlanes[2].mHeight = halfHeight;
+    b.mPlanes[2].mWidth = halfWidth;
+    b.mPlanes[2].mOffset = 0;
+    b.mPlanes[2].mSkip = 1;
+  }
 
   // YuvColorSpace
   b.mYUVColorSpace = mYUVColorSpace;
+  b.mColorDepth = colorDepth;
 
   TimeUnit pts = GetSampleTime(aSample);
   NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
   TimeUnit duration = GetSampleDuration(aSample);
   NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
   gfx::IntRect pictureRegion =
     mVideoInfo.ScaledImageRect(videoWidth, videoHeight);
 
-  if (!mKnowsCompositor || !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) {
+  if (colorDepth != gfx::ColorDepth::COLOR_8 || !mKnowsCompositor ||
+      !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) {
     RefPtr<VideoData> v =
       VideoData::CreateAndCopyData(mVideoInfo,
                                    mImageContainer,
                                    aStreamOffset,
                                    pts,
                                    duration,
                                    b,
                                    false,
@@ -1055,31 +1085,47 @@ WMFVideoMFTManager::Output(int64_t aStre
   while (true) {
     hr = mDecoder->Output(&sample);
     if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
       return MF_E_TRANSFORM_NEED_MORE_INPUT;
     }
 
     if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
       MOZ_ASSERT(!sample);
-      // Video stream output type change, probably geometric aperture change.
+      // Video stream output type change, probably a geometric aperture or
+      // pixel format change.
       // We must reconfigure the decoder output type.
-      hr = mDecoder->SetDecoderOutputType(false /* check all attribute */,
-                                          nullptr,
-                                          nullptr);
+
+      // Attempt to find an appropriate OutputType, trying in order:
+      // if HW accelerated: NV12, P010, P016
+      // if SW: YV12, P010, P016
+      if (FAILED((hr = (mDecoder->FindDecoderOutputTypeWithSubtype(
+                    mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12)))) &&
+          FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
+                    MFVideoFormat_P010))) &&
+          FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
+                    MFVideoFormat_P016)))) {
+        LOG("No suitable output format found");
+        return hr;
+      }
+
+      RefPtr<IMFMediaType> outputType;
+      hr = mDecoder->GetOutputMediaType(outputType);
       NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
-      if (!mUseHwAccel) {
+      if (mUseHwAccel) {
+        hr = mDXVA2Manager->ConfigureForSize(outputType,
+                                             mVideoInfo.ImageRect().width,
+                                             mVideoInfo.ImageRect().height);
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+      } else {
         // The stride may have changed, recheck for it.
-        RefPtr<IMFMediaType> outputType;
-        hr = mDecoder->GetOutputMediaType(outputType);
-        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
         mYUVColorSpace = GetYUVColorSpace(outputType);
-        hr = GetDefaultStride(outputType, mVideoInfo.ImageRect().width,
-                              &mVideoStride);
+        hr = GetDefaultStride(
+          outputType, mVideoInfo.ImageRect().width, &mVideoStride);
         NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
 
         UINT32 width = 0, height = 0;
         hr = MFGetAttributeSize(outputType, MF_MT_FRAME_SIZE, &width, &height);
         NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
         NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL);
         NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL);
         mDecodedImageSize = gfx::IntSize(width, height);
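
The stream-change handling above tries output subtypes in a fixed order: the 8-bit format for the current path (NV12 when hardware accelerated, YV12 otherwise), then the 10/16-bit semi-planar P010 and P016. The chained FAILED(...) expression can be read as the following equivalent sketch (helper name is illustrative):

    static HRESULT
    SelectOutputSubtype(MFTDecoder* aDecoder, bool aUseHwAccel)
    {
      const GUID candidates[] = {
        aUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12,
        MFVideoFormat_P010,
        MFVideoFormat_P016
      };
      for (const GUID& subtype : candidates) {
        // Succeeds as soon as the MFT offers a matching output type; later
        // entries are only tried on failure.
        HRESULT hr = aDecoder->FindDecoderOutputTypeWithSubtype(subtype);
        if (SUCCEEDED(hr)) {
          return hr;
        }
      }
      return E_FAIL;
    }
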
--- a/dom/script/ScriptLoader.cpp
+++ b/dom/script/ScriptLoader.cpp
@@ -2815,38 +2815,48 @@ ScriptLoader::ConvertToUTF16(nsIChannel*
 
   if (!unicodeDecoder) {
     // Curiously, there are various callers that don't pass aDocument. The
     // fallback in the old code was ISO-8859-1, which behaved like
     // windows-1252.
     unicodeDecoder = WINDOWS_1252_ENCODING->NewDecoderWithoutBOMHandling();
   }
 
-  CheckedInt<size_t> unicodeLength =
-    unicodeDecoder->MaxUTF16BufferLength(aLength);
-  if (!unicodeLength.isValid()) {
+  CheckedInt<size_t> maxLength = unicodeDecoder->MaxUTF16BufferLength(aLength);
+  if (!maxLength.isValid()) {
+    aBufOut = nullptr;
+    aLengthOut = 0;
     return NS_ERROR_OUT_OF_MEMORY;
   }
 
-  aBufOut =
-    static_cast<char16_t*>(js_malloc(unicodeLength.value() * sizeof(char16_t)));
+  size_t unicodeLength = maxLength.value();
+
+  maxLength *= sizeof(char16_t);
+
+  if (!maxLength.isValid()) {
+    aBufOut = nullptr;
+    aLengthOut = 0;
+    return NS_ERROR_OUT_OF_MEMORY;
+  }
+
+  aBufOut = static_cast<char16_t*>(js_malloc(maxLength.value()));
   if (!aBufOut) {
     aLengthOut = 0;
     return NS_ERROR_OUT_OF_MEMORY;
   }
 
   uint32_t result;
   size_t read;
   size_t written;
   bool hadErrors;
-  Tie(result, read, written, hadErrors) = unicodeDecoder->DecodeToUTF16(
-    data, MakeSpan(aBufOut, unicodeLength.value()), true);
+  Tie(result, read, written, hadErrors) =
+    unicodeDecoder->DecodeToUTF16(data, MakeSpan(aBufOut, unicodeLength), true);
   MOZ_ASSERT(result == kInputEmpty);
   MOZ_ASSERT(read == aLength);
-  MOZ_ASSERT(written <= unicodeLength.value());
+  MOZ_ASSERT(written <= unicodeLength);
   Unused << hadErrors;
   aLengthOut = written;
 
   return NS_OK;
 }
 
 nsresult
 ScriptLoader::OnStreamComplete(nsIIncrementalStreamLoader* aLoader,
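
The ScriptLoader change above adds a second overflow check before multiplying the UTF-16 unit count by sizeof(char16_t). A standalone sketch of the same CheckedInt pattern (the helper is illustrative and uses plain malloc for self-containment):

    #include <cstdlib>
    #include "mozilla/CheckedInt.h"

    static char16_t*
    AllocateUtf16Buffer(size_t aMaxUnits)
    {
      mozilla::CheckedInt<size_t> bytes = aMaxUnits;
      bytes *= sizeof(char16_t);
      if (!bytes.isValid()) {
        return nullptr;  // the byte count overflowed size_t
      }
      return static_cast<char16_t*>(malloc(bytes.value()));
    }
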
--- a/gfx/2d/Tools.h
+++ b/gfx/2d/Tools.h
@@ -113,16 +113,17 @@ static inline SurfaceFormat
 SurfaceFormatForColorDepth(ColorDepth aColorDepth)
 {
   SurfaceFormat format = SurfaceFormat::A8;
   switch (aColorDepth) {
     case ColorDepth::COLOR_8:
       break;
     case ColorDepth::COLOR_10:
     case ColorDepth::COLOR_12:
+    case ColorDepth::COLOR_16:
       format = SurfaceFormat::A16;
       break;
     case ColorDepth::UNKNOWN:
       MOZ_ASSERT_UNREACHABLE("invalid color depth value");
   }
   return format;
 }
 
@@ -134,16 +135,19 @@ BitDepthForColorDepth(ColorDepth aColorD
     case ColorDepth::COLOR_8:
       break;
     case ColorDepth::COLOR_10:
       depth = 10;
       break;
     case ColorDepth::COLOR_12:
       depth = 12;
       break;
+    case ColorDepth::COLOR_16:
+      depth = 16;
+      break;
     case ColorDepth::UNKNOWN:
       MOZ_ASSERT_UNREACHABLE("invalid color depth value");
   }
   return depth;
 }
 
 static inline ColorDepth
 ColorDepthForBitDepth(uint8_t aBitDepth)
@@ -153,16 +157,19 @@ ColorDepthForBitDepth(uint8_t aBitDepth)
     case 8:
       break;
     case 10:
       depth = ColorDepth::COLOR_10;
       break;
     case 12:
       depth = ColorDepth::COLOR_12;
       break;
+    case 16:
+      depth = ColorDepth::COLOR_16;
+      break;
     default:
       MOZ_ASSERT_UNREACHABLE("invalid color depth value");
   }
   return depth;
 }
 
 // 10- and 12-bit color depth images are stored using 16-bit integers.
 // As such we need to rescale the value from 10 or 12 bits to 16.
@@ -174,16 +181,18 @@ RescalingFactorForColorDepth(ColorDepth 
     case ColorDepth::COLOR_8:
       break;
     case ColorDepth::COLOR_10:
       factor = 64;
       break;
     case ColorDepth::COLOR_12:
       factor = 16;
       break;
+    case ColorDepth::COLOR_16:
+      break;
     case ColorDepth::UNKNOWN:
       MOZ_ASSERT_UNREACHABLE("invalid color depth value");
   }
   return factor;
 }
 
 static inline bool
 IsOpaqueFormat(SurfaceFormat aFormat) {
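
The new COLOR_16 case needs no entry in RescalingFactorForColorDepth because 16-bit samples already span the full 16-bit storage range; the existing factors scale the 10- and 12-bit maxima close to 65535. A quick check of that arithmetic (illustrative only):

    // 10-bit: 1023 * 64 = 65472; 12-bit: 4095 * 16 = 65520; 16-bit: factor 1.
    static_assert(1023 * 64 == 65472, "10-bit maximum scaled by 64");
    static_assert(4095 * 16 == 65520, "12-bit maximum scaled by 16");
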
--- a/gfx/2d/Types.h
+++ b/gfx/2d/Types.h
@@ -101,16 +101,17 @@ inline bool IsOpaque(SurfaceFormat aForm
     return false;
   }
 }
 
 enum class ColorDepth : uint8_t {
   COLOR_8,
   COLOR_10,
   COLOR_12,
+  COLOR_16,
   UNKNOWN
 };
 
 enum class FilterType : int8_t {
   BLEND = 0,
   TRANSFORM,
   MORPHOLOGY,
   COLOR_MATRIX,
--- a/gfx/layers/D3D11ShareHandleImage.cpp
+++ b/gfx/layers/D3D11ShareHandleImage.cpp
@@ -19,32 +19,39 @@
 #include <memory>
 
 namespace mozilla {
 namespace layers {
 
 using namespace gfx;
 
 D3D11ShareHandleImage::D3D11ShareHandleImage(const gfx::IntSize& aSize,
-                                             const gfx::IntRect& aRect)
- : Image(nullptr, ImageFormat::D3D11_SHARE_HANDLE_TEXTURE),
-   mSize(aSize),
-   mPictureRect(aRect)
+                                             const gfx::IntRect& aRect,
+                                             const GUID& aSourceFormat)
+  : Image(nullptr, ImageFormat::D3D11_SHARE_HANDLE_TEXTURE)
+  , mSize(aSize)
+  , mPictureRect(aRect)
+  , mSourceFormat(aSourceFormat)
+
 {
 }
 
 bool
-D3D11ShareHandleImage::AllocateTexture(D3D11RecycleAllocator* aAllocator, ID3D11Device* aDevice)
+D3D11ShareHandleImage::AllocateTexture(D3D11RecycleAllocator* aAllocator,
+                                       ID3D11Device* aDevice)
 {
   if (aAllocator) {
-    if (gfxPrefs::PDMWMFUseNV12Format() &&
+    if (mSourceFormat == MFVideoFormat_NV12 &&
+        gfxPrefs::PDMWMFUseNV12Format() &&
         gfx::DeviceManagerDx::Get()->CanUseNV12()) {
-      mTextureClient = aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::NV12, mSize);
+      mTextureClient =
+        aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::NV12, mSize);
     } else {
-      mTextureClient = aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::B8G8R8A8, mSize);
+      mTextureClient =
+        aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::B8G8R8A8, mSize);
     }
     if (mTextureClient) {
       mTexture = static_cast<D3D11TextureData*>(mTextureClient->GetInternalData())->GetD3D11Texture();
       return true;
     }
     return false;
   } else {
     MOZ_ASSERT(aDevice);
@@ -82,31 +89,32 @@ D3D11ShareHandleImage::GetAsSourceSurfac
   RefPtr<ID3D11Device> device;
   texture->GetDevice(getter_AddRefs(device));
 
   D3D11_TEXTURE2D_DESC desc;
   texture->GetDesc(&desc);
 
   HRESULT hr;
 
-  if (desc.Format == DXGI_FORMAT_NV12) {
+  if (desc.Format != DXGI_FORMAT_B8G8R8A8_UNORM) {
     nsAutoCString error;
     std::unique_ptr<DXVA2Manager> manager(DXVA2Manager::CreateD3D11DXVA(nullptr, error, device));
 
     if (!manager) {
       gfxWarning() << "Failed to create DXVA2 manager!";
       return nullptr;
     }
 
     RefPtr<ID3D11Texture2D> outTexture;
 
-    hr = manager->CopyToBGRATexture(texture, getter_AddRefs(outTexture));
+    hr = manager->CopyToBGRATexture(
+      texture, mSourceFormat, getter_AddRefs(outTexture));
 
     if (FAILED(hr)) {
-      gfxWarning() << "Failed to copy NV12 to BGRA texture.";
+      gfxWarning() << "Failed to copy to BGRA texture.";
       return nullptr;
     }
 
     texture = outTexture;
     texture->GetDesc(&desc);
   }
 
   CD3D11_TEXTURE2D_DESC softDesc(desc.Format, desc.Width, desc.Height);
--- a/gfx/layers/D3D11ShareHandleImage.h
+++ b/gfx/layers/D3D11ShareHandleImage.h
@@ -47,31 +47,34 @@ protected:
 
 // Image class that wraps a ID3D11Texture2D. This class copies the image
 // passed into SetData(), so that it can be accessed from other D3D devices.
 // This class also manages the synchronization of the copy, to ensure the
 // resource is ready to use.
 class D3D11ShareHandleImage final : public Image {
 public:
   D3D11ShareHandleImage(const gfx::IntSize& aSize,
-                        const gfx::IntRect& aRect);
+                        const gfx::IntRect& aRect,
+                        const GUID& aSourceFormat);
   virtual ~D3D11ShareHandleImage() {}
 
-  bool AllocateTexture(D3D11RecycleAllocator* aAllocator, ID3D11Device* aDevice);
+  bool AllocateTexture(D3D11RecycleAllocator* aAllocator,
+                       ID3D11Device* aDevice);
 
   gfx::IntSize GetSize() const override;
   already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override;
   TextureClient* GetTextureClient(KnowsCompositor* aForwarder) override;
   gfx::IntRect GetPictureRect() const override { return mPictureRect; }
 
   ID3D11Texture2D* GetTexture() const;
 
 private:
   gfx::IntSize mSize;
   gfx::IntRect mPictureRect;
+  const GUID mSourceFormat;
   RefPtr<TextureClient> mTextureClient;
   RefPtr<ID3D11Texture2D> mTexture;
 };
 
 } // namespace layers
 } // namespace mozilla
 
 #endif // GFX_D3DSURFACEIMAGE_H
--- a/gfx/layers/client/TextureClient.cpp
+++ b/gfx/layers/client/TextureClient.cpp
@@ -1890,23 +1890,16 @@ MappedYCbCrChannelData::CopyInto(MappedY
     return true;
   }
 
   MOZ_ASSERT(bytesPerPixel == 1 || bytesPerPixel == 2);
   // slow path
   if (bytesPerPixel == 1) {
     copyData(aDst.data, aDst, data, *this);
   } else if (bytesPerPixel == 2) {
-    if (skip != 0) {
-      // The skip value definition doesn't specify if it's in bytes, or in
-      // "pixels". We will assume the later. There are currently no decoders
-      // returning HDR content with a skip value different than zero anyway.
-      NS_WARNING("skip value non zero for HDR content, please verify code "
-                 "(see bug 1421187)");
-    }
     copyData(reinterpret_cast<uint16_t*>(aDst.data),
              aDst,
              reinterpret_cast<uint16_t*>(data),
              *this);
   }
   return true;
 }
 
--- a/gfx/webrender_bindings/WebRenderTypes.h
+++ b/gfx/webrender_bindings/WebRenderTypes.h
@@ -859,16 +859,18 @@ static inline wr::WrYuvColorSpace ToWrYu
 static inline wr::WrColorDepth ToWrColorDepth(gfx::ColorDepth aColorDepth) {
   switch (aColorDepth) {
     case gfx::ColorDepth::COLOR_8:
       return wr::WrColorDepth::Color8;
     case gfx::ColorDepth::COLOR_10:
       return wr::WrColorDepth::Color10;
     case gfx::ColorDepth::COLOR_12:
       return wr::WrColorDepth::Color12;
+    case gfx::ColorDepth::COLOR_16:
+      return wr::WrColorDepth::Color16;
     default:
       MOZ_ASSERT_UNREACHABLE("Tried to convert invalid color depth value.");
   }
   return wr::WrColorDepth::Color8;
 }
 
 static inline wr::SyntheticItalics DegreesToSyntheticItalics(float aDegrees) {
   wr::SyntheticItalics synthetic_italics;
--- a/toolkit/components/extensions/parent/ext-clipboard.js
+++ b/toolkit/components/extensions/parent/ext-clipboard.js
@@ -1,79 +1,74 @@
 /* -*- Mode: indent-tabs-mode: nil; js-indent-level: 2 -*- */
 /* vim: set sts=2 sw=2 et tw=80: */
 "use strict";
 
 XPCOMUtils.defineLazyServiceGetter(this, "imgTools",
                                    "@mozilla.org/image/tools;1", "imgITools");
 
-const SupportsInterfacePointer = Components.Constructor(
-  "@mozilla.org/supports-interface-pointer;1", "nsISupportsInterfacePointer");
 const Transferable = Components.Constructor(
   "@mozilla.org/widget/transferable;1", "nsITransferable");
 
 this.clipboard = class extends ExtensionAPI {
   getAPI(context) {
     return {
       clipboard: {
         async setImageData(imageData, imageType) {
           if (AppConstants.platform == "android") {
             return Promise.reject({message: "Writing images to the clipboard is not supported on Android"});
           }
           let mimeType = `image/${imageType}`;
-          let container;
+          let img;
           try {
-            container = imgTools.decodeImageFromArrayBuffer(imageData, mimeType);
+            img = imgTools.decodeImageFromArrayBuffer(imageData, mimeType);
           } catch (e) {
             return Promise.reject({message: `Data is not a valid ${imageType} image`});
           }
 
           // Other applications can only access the copied image once the data
           // is exported via the platform-specific clipboard APIs:
           // nsClipboard::SelectionGetEvent (widget/gtk/nsClipboard.cpp)
           // nsClipboard::PasteDictFromTransferable (widget/cocoa/nsClipboard.mm)
           // nsDataObj::GetDib (widget/windows/nsDataObj.cpp)
           //
           // The common protocol for exporting a nsITransferable as an image is:
           // - Use nsITransferable::GetTransferData to fetch the stored data.
-          // - QI a nsISupportsInterfacePointer and get the underlying pointer.
           // - QI imgIContainer on the pointer.
           // - Convert the image to the native clipboard format.
           //
           // Below we create a nsITransferable in the above format.
-          let imgPtr = new SupportsInterfacePointer();
-          imgPtr.data = container;
           let transferable = new Transferable();
           transferable.init(null);
           transferable.addDataFlavor(mimeType);
 
           // Internal consumers expect the image data to be stored as a
           // nsIInputStream. On Linux and Windows, pasted data is directly
           // retrieved from the system's native clipboard, and made available
           // as a nsIInputStream.
           //
           // On macOS, nsClipboard::GetNativeClipboardData (nsClipboard.mm) uses
           // a cached copy of nsITransferable if available, e.g. when the copy
           // was initiated by the same browser instance. Consequently, the
-          // transferable still holds a nsISupportsInterfacePointer pointer
+          // transferable still holds an imgIContainer pointer
           // instead of a nsIInputStream, and logic that assumes the data to be
           // a nsIInputStream instance fails.
           // For example HTMLEditor::InsertObject (HTMLEditorDataTransfer.cpp)
           // and DataTransferItem::FillInExternalData (DataTransferItem.cpp).
           //
           // As a work-around, we force nsClipboard::GetNativeClipboardData to
           // ignore the cached image data, by passing zero as the length
           // parameter to transferable.setTransferData. When the length is zero,
           // nsITransferable::GetTransferData will return NS_ERROR_FAILURE and
           // conveniently nsClipboard::GetNativeClipboardData will then fall
           // back to retrieving the data directly from the system's clipboard.
           //
           // Note that the length itself is not really used if the data is not
           // a string type, so the actual value does not matter.
-          transferable.setTransferData(mimeType, imgPtr, 0);
+          transferable.setTransferData(mimeType, img, 0);
 
           Services.clipboard.setData(
             transferable, null, Services.clipboard.kGlobalClipboard);
         },
       },
     };
   }
 };
--- a/widget/cocoa/nsClipboard.mm
+++ b/widget/cocoa/nsClipboard.mm
@@ -582,24 +582,18 @@ nsClipboard::PasteboardDictFromTransfera
       }
     }
     else if (flavorStr.EqualsLiteral(kPNGImageMime) || flavorStr.EqualsLiteral(kJPEGImageMime) ||
              flavorStr.EqualsLiteral(kJPGImageMime) || flavorStr.EqualsLiteral(kGIFImageMime) ||
              flavorStr.EqualsLiteral(kNativeImageMime)) {
       uint32_t dataSize = 0;
       nsCOMPtr<nsISupports> transferSupports;
       aTransferable->GetTransferData(flavorStr.get(), getter_AddRefs(transferSupports), &dataSize);
-      nsCOMPtr<nsISupportsInterfacePointer> ptrPrimitive(do_QueryInterface(transferSupports));
-      if (!ptrPrimitive)
-        continue;
 
-      nsCOMPtr<nsISupports> primitiveData;
-      ptrPrimitive->GetData(getter_AddRefs(primitiveData));
-
-      nsCOMPtr<imgIContainer> image(do_QueryInterface(primitiveData));
+      nsCOMPtr<imgIContainer> image(do_QueryInterface(transferSupports));
       if (!image) {
         NS_WARNING("Image isn't an imgIContainer in transferable");
         continue;
       }
 
       RefPtr<SourceSurface> surface =
         image->GetFrame(imgIContainer::FRAME_CURRENT,
                         imgIContainer::FLAG_SYNC_DECODE);
@@ -643,25 +637,16 @@ nsClipboard::PasteboardDictFromTransfera
       nsCOMPtr<nsISupports> genericFile;
       rv = aTransferable->GetTransferData(flavorStr.get(), getter_AddRefs(genericFile), &len);
       if (NS_FAILED(rv)) {
         continue;
       }
 
       nsCOMPtr<nsIFile> file(do_QueryInterface(genericFile));
       if (!file) {
-        nsCOMPtr<nsISupportsInterfacePointer> ptr(do_QueryInterface(genericFile));
-
-        if (ptr) {
-          ptr->GetData(getter_AddRefs(genericFile));
-          file = do_QueryInterface(genericFile);
-        }
-      }
-
-      if (!file) {
         continue;
       }
 
       nsAutoString fileURI;
       rv = file->GetPath(fileURI);
       if (NS_FAILED(rv)) {
         continue;
       }
--- a/widget/gtk/nsClipboard.cpp
+++ b/widget/gtk/nsClipboard.cpp
@@ -531,29 +531,28 @@ nsClipboard::SelectionGetEvent(GtkClipbo
     }
 
     // Check to see if the selection data is an image type
     if (gtk_targets_include_image(&selectionTarget, 1, TRUE)) {
         // Look through our transfer data for the image
         static const char* const imageMimeTypes[] = {
             kNativeImageMime, kPNGImageMime, kJPEGImageMime, kJPGImageMime, kGIFImageMime };
         nsCOMPtr<nsISupports> imageItem;
-        nsCOMPtr<nsISupportsInterfacePointer> ptrPrimitive;
-        for (uint32_t i = 0; !ptrPrimitive && i < ArrayLength(imageMimeTypes); i++) {
+        nsCOMPtr<imgIContainer> image;
+        for (uint32_t i = 0; i < ArrayLength(imageMimeTypes); i++) {
             rv = trans->GetTransferData(imageMimeTypes[i], getter_AddRefs(imageItem), &len);
-            ptrPrimitive = do_QueryInterface(imageItem);
+            image = do_QueryInterface(imageItem);
+            if (image) {
+                break;
+            }
         }
-        if (!ptrPrimitive)
+
+        if (!image) { // Not getting an image for an image mime type!?
             return;
-
-        nsCOMPtr<nsISupports> primitiveData;
-        ptrPrimitive->GetData(getter_AddRefs(primitiveData));
-        nsCOMPtr<imgIContainer> image(do_QueryInterface(primitiveData));
-        if (!image) // Not getting an image for an image mime type!?
-            return;
+        }
 
         GdkPixbuf* pixbuf = nsImageToPixbuf::ImageToPixbuf(image);
         if (!pixbuf)
             return;
 
         gtk_selection_data_set_pixbuf(aSelectionData, pixbuf);
         g_object_unref(pixbuf);
         return;
--- a/widget/gtk/nsDragService.cpp
+++ b/widget/gtk/nsDragService.cpp
@@ -1586,30 +1586,17 @@ CreateUriList(nsIArray *items, gchar **t
             } else {
                 // There is no uri available.  If there is a file available,
                 // create a uri from the file.
                 nsCOMPtr<nsISupports> data;
                 rv = item->GetTransferData(kFileMime,
                                            getter_AddRefs(data),
                                            &tmpDataLen);
                 if (NS_SUCCEEDED(rv)) {
-                    nsCOMPtr<nsIFile> file = do_QueryInterface(data);
-                    if (!file) {
-                        // Sometimes the file is wrapped in a
-                        // nsISupportsInterfacePointer. See bug 1310193 for
-                        // removing this distinction.
-                        nsCOMPtr<nsISupportsInterfacePointer> ptr =
-                          do_QueryInterface(data);
-                        if (ptr) {
-                            ptr->GetData(getter_AddRefs(data));
-                            file = do_QueryInterface(data);
-                        }
-                    }
-
-                    if (file) {
+                    if (nsCOMPtr<nsIFile> file = do_QueryInterface(data)) {
                         nsCOMPtr<nsIURI> fileURI;
                         NS_NewFileURI(getter_AddRefs(fileURI), file);
                         if (fileURI) {
                             nsAutoCString uristring;
                             fileURI->GetSpec(uristring);
                             g_string_append(uriList, uristring.get());
                             g_string_append(uriList, "\r\n");
                         }
--- a/widget/windows/nsDataObj.cpp
+++ b/widget/windows/nsDataObj.cpp
@@ -896,28 +896,16 @@ nsDataObj::GetDib(const nsACString& inFl
                   FORMATETC &aFormat,
                   STGMEDIUM & aSTG)
 {
   ULONG result = E_FAIL;
   uint32_t len = 0;
   nsCOMPtr<nsISupports> genericDataWrapper;
   mTransferable->GetTransferData(PromiseFlatCString(inFlavor).get(), getter_AddRefs(genericDataWrapper), &len);
   nsCOMPtr<imgIContainer> image ( do_QueryInterface(genericDataWrapper) );
-  if ( !image ) {
-    // Check if the image was put in an nsISupportsInterfacePointer wrapper.
-    // This might not be necessary any more, but could be useful for backwards
-    // compatibility.
-    nsCOMPtr<nsISupportsInterfacePointer> ptr(do_QueryInterface(genericDataWrapper));
-    if ( ptr ) {
-      nsCOMPtr<nsISupports> supports;
-      ptr->GetData(getter_AddRefs(supports));
-      image = do_QueryInterface(supports);
-    }
-  }
-  
   if ( image ) {
     // use the |nsImageToClipboard| helper class to build up a bitmap. We now own
     // the bits, and pass them back to the OS in |aSTG|.
     nsImageToClipboard converter(image, aFormat.cfFormat == CF_DIBV5);
     HANDLE bits = nullptr;
     nsresult rv = converter.GetPicture ( &bits );
     if ( NS_SUCCEEDED(rv) && bits ) {
       aSTG.hGlobal = bits;
@@ -1461,27 +1449,16 @@ HRESULT nsDataObj::DropFile(FORMATETC& a
 {
   nsresult rv;
   uint32_t len = 0;
   nsCOMPtr<nsISupports> genericDataWrapper;
 
   mTransferable->GetTransferData(kFileMime, getter_AddRefs(genericDataWrapper),
                                  &len);
   nsCOMPtr<nsIFile> file ( do_QueryInterface(genericDataWrapper) );
-
-  if (!file)
-  {
-    nsCOMPtr<nsISupportsInterfacePointer> ptr(do_QueryInterface(genericDataWrapper));
-    if (ptr) {
-      nsCOMPtr<nsISupports> supports;
-      ptr->GetData(getter_AddRefs(supports));
-      file = do_QueryInterface(supports);
-    }
-  }
-
   if (!file)
     return E_FAIL;
 
   aSTG.tymed = TYMED_HGLOBAL;
   aSTG.pUnkForRelease = nullptr;
 
   nsAutoString path;
   rv = file->GetPath(path);
@@ -1526,29 +1503,16 @@ HRESULT nsDataObj::DropImage(FORMATETC& 
 {
   nsresult rv;
   if (!mCachedTempFile) {
     uint32_t len = 0;
     nsCOMPtr<nsISupports> genericDataWrapper;
 
     mTransferable->GetTransferData(kNativeImageMime, getter_AddRefs(genericDataWrapper), &len);
     nsCOMPtr<imgIContainer> image(do_QueryInterface(genericDataWrapper));
-
-    if (!image) {
-      // Check if the image was put in an nsISupportsInterfacePointer wrapper.
-      // This might not be necessary any more, but could be useful for backwards
-      // compatibility.
-      nsCOMPtr<nsISupportsInterfacePointer> ptr(do_QueryInterface(genericDataWrapper));
-      if (ptr) {
-        nsCOMPtr<nsISupports> supports;
-        ptr->GetData(getter_AddRefs(supports));
-        image = do_QueryInterface(supports);
-      }
-    }
-
     if (!image) 
       return E_FAIL;
 
     // Use the clipboard helper class to build up a memory bitmap.
     nsImageToClipboard converter(image);
     HANDLE bits = nullptr;
     rv = converter.GetPicture(&bits); // Clipboard routines return a global handle we own.