Bug 1542674 - Make Media debug info machine parsable r=padenot,smaug,jya draft
author Tarek Ziadé <tarek@mozilla.com>
Mon, 27 May 2019 15:47:23 +0200
changeset 2026413 b0afe5e0494f6eed673b713b747acf9bc78cce7e
parent 2026412 a7bc904e71ee84c55643f1b4c33d6d9f48517898
child 2026414 527db4409230279d5e531b869a837ded4bfafdae
child 2026429 57bfd31e9a774a3a9c603809f173c302a50b66c0
push id 368010
push user tziade@mozilla.com
push date Mon, 27 May 2019 13:57:38 +0000
treeherder try@527db4409230
reviewers padenot, smaug, jya
bugs 1542674
milestone 69.0a1
Bug 1542674 - Make Media debug info machine parsable r=padenot,smaug,jya

This patch structures the media debug information via WebIDL dictionaries that are returned by HTMLMediaElement::MozRequestDebugInfo() and MediaSource::MozDebugReaderData().

Differential Revision: https://phabricator.services.mozilla.com/D27893
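For reference, the dictionaries that the patch populates are declared in the new dom/webidl/MediaDebugInfo.webidl, which is listed below but whose contents are not shown in this diff. A minimal sketch of two of them, inferred only from the fields the C++ in this patch assigns (mKeySystem/mSessionsInfo in HTMLMediaElement.cpp, mStreamLength and friends in MediaCache.cpp); the actual file may declare additional members and different defaults:

  // Sketch only: member names and types inferred from the C++ assignments
  // in this patch; not the verbatim contents of MediaDebugInfo.webidl.
  dictionary EMEDebugInfo {
    DOMString keySystem = "";
    DOMString sessionsInfo = "";
  };

  dictionary MediaCacheStreamDebugInfo {
    long long streamLength = 0;
    long long channelOffset = 0;
    boolean cacheSuspended = false;
    boolean channelEnded = false;
    unsigned long loadID = 0;
  };

With this shape, HTMLMediaElement.mozRequestDebugInfo() resolves with a structured object mirroring these dictionaries instead of a flat string, which is what makes the output machine parsable.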
dom/html/HTMLMediaElement.cpp
dom/html/HTMLMediaElement.h
dom/media/BaseMediaResource.h
dom/media/ChannelMediaDecoder.cpp
dom/media/ChannelMediaDecoder.h
dom/media/ChannelMediaResource.cpp
dom/media/ChannelMediaResource.h
dom/media/MediaCache.cpp
dom/media/MediaCache.h
dom/media/MediaDecoder.cpp
dom/media/MediaDecoder.h
dom/media/MediaDecoderStateMachine.cpp
dom/media/MediaDecoderStateMachine.h
dom/media/MediaFormatReader.cpp
dom/media/MediaFormatReader.h
dom/media/mediasink/AudioSink.cpp
dom/media/mediasink/AudioSink.h
dom/media/mediasink/AudioSinkWrapper.cpp
dom/media/mediasink/AudioSinkWrapper.h
dom/media/mediasink/DecodedStream.cpp
dom/media/mediasink/DecodedStream.h
dom/media/mediasink/MediaSink.h
dom/media/mediasink/VideoSink.cpp
dom/media/mediasink/VideoSink.h
dom/media/mediasource/MediaSource.cpp
dom/media/mediasource/MediaSource.h
dom/media/mediasource/MediaSourceDecoder.cpp
dom/media/mediasource/MediaSourceDecoder.h
dom/media/mediasource/MediaSourceDemuxer.cpp
dom/media/mediasource/MediaSourceDemuxer.h
dom/media/mediasource/TrackBuffersManager.cpp
dom/media/mediasource/TrackBuffersManager.h
dom/media/test/marionette/yttest/support.py
dom/media/test/marionette/yttest/ytpage.py
dom/webidl/HTMLMediaElement.webidl
dom/webidl/MediaDebugInfo.webidl
dom/webidl/MediaSource.webidl
dom/webidl/moz.build
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -1538,59 +1538,41 @@ void HTMLMediaElement::ContentRemoved(ns
 }
 
 already_AddRefed<MediaSource> HTMLMediaElement::GetMozMediaSourceObject()
     const {
   RefPtr<MediaSource> source = mMediaSource;
   return source.forget();
 }
 
-void HTMLMediaElement::GetMozDebugReaderData(nsAString& aString) {
-  if (mDecoder && !mSrcStream) {
-    nsAutoCString result;
-    mDecoder->GetMozDebugReaderData(result);
-    CopyUTF8toUTF16(result, aString);
-  }
-}
-
 already_AddRefed<Promise> HTMLMediaElement::MozRequestDebugInfo(
     ErrorResult& aRv) {
   RefPtr<Promise> promise = CreateDOMPromise(aRv);
   if (NS_WARN_IF(aRv.Failed())) {
     return nullptr;
   }
-
-  nsAutoString result;
-  GetMozDebugReaderData(result);
-
-  if (mVideoFrameContainer) {
-    result.AppendPrintf(
-        "Compositor dropped frame(including when element's invisible): %u\n",
-        mVideoFrameContainer->GetDroppedImageCount());
-  }
-
+  auto result = MakeUnique<dom::HTMLMediaElementDebugInfo>();
   if (mMediaKeys) {
-    nsString EMEInfo;
-    GetEMEInfo(EMEInfo);
-    result.AppendLiteral("EME Info: ");
-    result.Append(EMEInfo);
-    result.AppendLiteral("\n");
-  }
-
+    GetEMEInfo(result->mEMEInfo);
+  }
+  if (mVideoFrameContainer) {
+    result->mCompositorDroppedFrames =
+        mVideoFrameContainer->GetDroppedImageCount();
+  }
   if (mDecoder) {
-    mDecoder->RequestDebugInfo()->Then(
-        mAbstractMainThread, __func__,
-        [promise, result](const nsACString& aString) {
-          promise->MaybeResolve(result + NS_ConvertUTF8toUTF16(aString));
-        },
-        [promise, result]() { promise->MaybeResolve(result); });
+    mDecoder->RequestDebugInfo(result->mDecoder)
+        ->Then(
+            mAbstractMainThread, __func__,
+            [promise, ptr = std::move(result)]() {
+              promise->MaybeResolve(ptr.get());
+            },
+            []() { UNREACHABLE(); });
   } else {
-    promise->MaybeResolve(result);
-  }
-
+    promise->MaybeResolve(result.get());
+  }
   return promise.forget();
 }
 
 /* static */
 void HTMLMediaElement::MozEnableDebugLog(const GlobalObject&) {
   DecoderDoctorLogger::EnableLogging();
 }
 
@@ -1606,32 +1588,16 @@ already_AddRefed<Promise> HTMLMediaEleme
       [promise](const nsACString& aString) {
         promise->MaybeResolve(NS_ConvertUTF8toUTF16(aString));
       },
       [promise](nsresult rv) { promise->MaybeReject(rv); });
 
   return promise.forget();
 }
 
-already_AddRefed<Promise> HTMLMediaElement::MozDumpDebugInfo() {
-  ErrorResult rv;
-  RefPtr<Promise> promise = CreateDOMPromise(rv);
-  if (NS_WARN_IF(rv.Failed())) {
-    return nullptr;
-  }
-  if (mDecoder) {
-    mDecoder->DumpDebugInfo()->Then(mAbstractMainThread, __func__,
-                                    promise.get(),
-                                    &Promise::MaybeResolveWithUndefined);
-  } else {
-    promise->MaybeResolveWithUndefined();
-  }
-  return promise.forget();
-}
-
 void HTMLMediaElement::SetVisible(bool aVisible) {
   mForcedHidden = !aVisible;
   if (mDecoder) {
     mDecoder->SetForcedHidden(!aVisible);
   }
 }
 
 bool HTMLMediaElement::IsVideoDecodingSuspended() const {
@@ -7153,31 +7119,22 @@ void HTMLMediaElement::AsyncRejectPendin
   }
 
   nsCOMPtr<nsIRunnable> event = new nsResolveOrRejectPendingPlayPromisesRunner(
       this, TakePendingPlayPromises(), aError);
 
   mMainThreadEventTarget->Dispatch(event.forget());
 }
 
-void HTMLMediaElement::GetEMEInfo(nsString& aEMEInfo) {
+void HTMLMediaElement::GetEMEInfo(dom::EMEDebugInfo& aInfo) {
   if (!mMediaKeys) {
     return;
   }
-
-  nsString keySystem;
-  mMediaKeys->GetKeySystem(keySystem);
-
-  nsString sessionsInfo;
-  mMediaKeys->GetSessionsInfo(sessionsInfo);
-
-  aEMEInfo.AppendLiteral("Key System=");
-  aEMEInfo.Append(keySystem);
-  aEMEInfo.AppendLiteral(" SessionsInfo=");
-  aEMEInfo.Append(sessionsInfo);
+  mMediaKeys->GetKeySystem(aInfo.mKeySystem);
+  mMediaKeys->GetSessionsInfo(aInfo.mSessionsInfo);
 }
 
 void HTMLMediaElement::NotifyDecoderActivityChanges() const {
   if (mDecoder) {
     mDecoder->NotifyOwnerActivityChanged(!IsHidden(), mVisibilityState,
                                          IsInComposedDoc());
   }
 }
--- a/dom/html/HTMLMediaElement.h
+++ b/dom/html/HTMLMediaElement.h
@@ -13,31 +13,32 @@
 #include "MediaDecoderOwner.h"
 #include "MediaPromiseDefs.h"
 #include "nsCycleCollectionParticipant.h"
 #include "nsIObserver.h"
 #include "mozilla/CORSMode.h"
 #include "DecoderTraits.h"
 #include "nsIAudioChannelAgent.h"
 #include "mozilla/Attributes.h"
-#include "mozilla/dom/TextTrackManager.h"
+#include "mozilla/StateWatching.h"
 #include "mozilla/WeakPtr.h"
+#include "mozilla/dom/HTMLMediaElementBinding.h"
+#include "mozilla/dom/MediaDebugInfoBinding.h"
 #include "mozilla/dom/MediaKeys.h"
-#include "mozilla/StateWatching.h"
+#include "mozilla/dom/TextTrackManager.h"
 #include "nsGkAtoms.h"
 #include "PrincipalChangeObserver.h"
 #include "nsStubMutationObserver.h"
 #include "MediaSegment.h"  // for PrincipalHandle, GraphTime
 
 // X.h on Linux #defines CurrentTime as 0L, so we have to #undef it here.
 #ifdef CurrentTime
 #  undef CurrentTime
 #endif
 
-#include "mozilla/dom/HTMLMediaElementBinding.h"
 
 // Define to output information on decoding and painting framerate
 /* #define DEBUG_FRAME_RATE 1 */
 
 typedef uint16_t nsMediaNetworkState;
 typedef uint16_t nsMediaReadyState;
 typedef uint32_t SuspendTypes;
 typedef uint32_t AudibleChangedReasons;
@@ -287,17 +288,17 @@ class HTMLMediaElement : public nsGeneri
   // to the image container, we return the last video principal we had. Should
   // the image container be empty with no live video tracks, we return nullptr.
   already_AddRefed<nsIPrincipal> GetCurrentVideoPrincipal();
 
   // called to notify that the principal of the decoder's media resource has
   // changed.
   void NotifyDecoderPrincipalChanged() final;
 
-  void GetEMEInfo(nsString& aEMEInfo);
+  void GetEMEInfo(dom::EMEDebugInfo& aInfo);
 
   class StreamCaptureTrackSource;
 
   // Update the visual size of the media. Called from the decoder on the
   // main thread when/if the size changes.
   virtual void UpdateMediaSize(const nsIntSize& aSize);
   // Like UpdateMediaSize, but only updates the size if no size has yet
   // been set.
@@ -533,33 +534,28 @@ class HTMLMediaElement : public nsGeneri
 
   // Returns whether a call to Play() would be rejected with NotAllowedError.
   // This assumes "worst case" for unknowns. So if prompting for permission is
   // enabled and no permission is stored, this behaves as if the user would
   // opt to block.
   bool AllowedToPlay() const;
 
   already_AddRefed<MediaSource> GetMozMediaSourceObject() const;
-  // Returns a string describing the state of the media player internal
-  // data. Used for debugging purposes.
-  void GetMozDebugReaderData(nsAString& aString);
 
   // Returns a promise which will be resolved after collecting debugging
   // data from decoder/reader/MDSM. Used for debugging purposes.
   already_AddRefed<Promise> MozRequestDebugInfo(ErrorResult& aRv);
 
   // Enables DecoderDoctorLogger logging. Used for debugging purposes.
   static void MozEnableDebugLog(const GlobalObject&);
 
   // Returns a promise which will be resolved after collecting debugging
   // log associated with this element. Used for debugging purposes.
   already_AddRefed<Promise> MozRequestDebugLog(ErrorResult& aRv);
 
-  already_AddRefed<Promise> MozDumpDebugInfo();
-
   // For use by mochitests. Enabling pref "media.test.video-suspend"
   void SetVisible(bool aVisible);
 
   // For use by mochitests. Enabling pref "media.test.video-suspend"
   bool HasSuspendTaint() const;
 
   // For use by mochitests.
   bool IsVideoDecodingSuspended() const;
--- a/dom/media/BaseMediaResource.h
+++ b/dom/media/BaseMediaResource.h
@@ -7,16 +7,17 @@
 #define BaseMediaResource_h
 
 #include "MediaResource.h"
 #include "MediaResourceCallback.h"
 #include "MediaCache.h"
 #include "nsIChannel.h"
 #include "nsIURI.h"
 #include "nsIStreamListener.h"
+#include "mozilla/dom/MediaDebugInfoBinding.h"
 
 class nsIPrincipal;
 
 namespace mozilla {
 
 DDLoggedTypeDeclNameAndBase(BaseMediaResource, MediaResource);
 
 class BaseMediaResource : public MediaResource,
@@ -103,17 +104,17 @@ class BaseMediaResource : public MediaRe
     // - mCallback
     return 0;
   }
 
   virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
     return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
   }
 
-  virtual nsCString GetDebugInfo() { return nsCString(); }
+  virtual void GetDebugInfo(dom::MediaResourceDebugInfo& aInfo) {}
 
  protected:
   BaseMediaResource(MediaResourceCallback* aCallback, nsIChannel* aChannel,
                     nsIURI* aURI)
       : mCallback(aCallback),
         mChannel(aChannel),
         mURI(aURI),
         mLoadInBackground(false) {}
--- a/dom/media/ChannelMediaDecoder.cpp
+++ b/dom/media/ChannelMediaDecoder.cpp
@@ -1,15 +1,16 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "ChannelMediaDecoder.h"
+#include "ChannelMediaResource.h"
 #include "DecoderTraits.h"
 #include "MediaDecoderStateMachine.h"
 #include "MediaFormatReader.h"
 #include "BaseMediaResource.h"
 #include "MediaShutdownManager.h"
 #include "mozilla/StaticPrefs.h"
 #include "VideoUtils.h"
 
@@ -530,20 +531,19 @@ void ChannelMediaDecoder::MetadataLoaded
     UniquePtr<MediaInfo> aInfo, UniquePtr<MetadataTags> aTags,
     MediaDecoderEventVisibility aEventVisibility) {
   MediaDecoder::MetadataLoaded(std::move(aInfo), std::move(aTags),
                                aEventVisibility);
   // Set mode to PLAYBACK after reading metadata.
   mResource->SetReadMode(MediaCacheStream::MODE_PLAYBACK);
 }
 
-nsCString ChannelMediaDecoder::GetDebugInfo() {
-  nsCString str = MediaDecoder::GetDebugInfo();
+void ChannelMediaDecoder::GetDebugInfo(dom::MediaDecoderDebugInfo& aInfo) {
+  MediaDecoder::GetDebugInfo(aInfo);
   if (mResource) {
-    AppendStringIfNotEmpty(str, mResource->GetDebugInfo());
+    mResource->GetDebugInfo(aInfo.mResource);
   }
-  return str;
 }
 
 }  // namespace mozilla
 
 // avoid redefined macro in unified build
 #undef LOG
--- a/dom/media/ChannelMediaDecoder.h
+++ b/dom/media/ChannelMediaDecoder.h
@@ -65,17 +65,17 @@ class ChannelMediaDecoder
                       MediaDecoderEventVisibility aEventVisibility) override;
   void NotifyPrincipalChanged() override;
 
   RefPtr<ResourceCallback> mResourceCallback;
   RefPtr<BaseMediaResource> mResource;
 
   explicit ChannelMediaDecoder(MediaDecoderInit& aInit);
 
-  nsCString GetDebugInfo() override;
+  void GetDebugInfo(dom::MediaDecoderDebugInfo& aInfo);
 
  public:
   // Create a decoder for the given aType. Returns null if we were unable
   // to create the decoder, for example because the requested MIME type in
   // the init struct was unsupported.
   static already_AddRefed<ChannelMediaDecoder> Create(
       MediaDecoderInit& aInit, DecoderDoctorDiagnostics* aDiagnostics);
 
--- a/dom/media/ChannelMediaResource.cpp
+++ b/dom/media/ChannelMediaResource.cpp
@@ -906,19 +906,18 @@ void ChannelMediaResource::Pin() { mCach
 void ChannelMediaResource::Unpin() { mCacheStream.Unpin(); }
 
 double ChannelMediaResource::GetDownloadRate(bool* aIsReliable) {
   return mCacheStream.GetDownloadRate(aIsReliable);
 }
 
 int64_t ChannelMediaResource::GetLength() { return mCacheStream.GetLength(); }
 
-nsCString ChannelMediaResource::GetDebugInfo() {
-  return NS_LITERAL_CSTRING("ChannelMediaResource: ") +
-         mCacheStream.GetDebugInfo();
+void ChannelMediaResource::GetDebugInfo(dom::MediaResourceDebugInfo& aInfo) {
+  mCacheStream.GetDebugInfo(aInfo.mCacheStream);
 }
 
 // ChannelSuspendAgent
 
 bool ChannelSuspendAgent::Suspend() {
   MOZ_ASSERT(NS_IsMainThread());
   SuspendInternal();
   if (++mSuspendCount == 1) {
--- a/dom/media/ChannelMediaResource.h
+++ b/dom/media/ChannelMediaResource.h
@@ -150,17 +150,17 @@ class ChannelMediaResource
 
     return size;
   }
 
   size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override {
     return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
   }
 
-  nsCString GetDebugInfo() override;
+  void GetDebugInfo(dom::MediaResourceDebugInfo& aInfo) override;
 
   class Listener final : public nsIStreamListener,
                          public nsIInterfaceRequestor,
                          public nsIChannelEventSink,
                          public nsIThreadRetargetableStreamListener {
     ~Listener() {}
 
    public:
--- a/dom/media/MediaCache.cpp
+++ b/dom/media/MediaCache.cpp
@@ -480,17 +480,18 @@ class MediaCache {
   static bool sThreadInit;
 
  private:
   // MediaCache thread only. True if we're on a cellular network connection.
   static bool sOnCellular;
 
   // Used by MediaCacheStream::GetDebugInfo() only for debugging.
   // Don't add new callers to this function.
-  friend nsCString MediaCacheStream::GetDebugInfo();
+  friend void MediaCacheStream::GetDebugInfo(
+      dom::MediaCacheStreamDebugInfo& aInfo);
   mozilla::Monitor& GetMonitorOnTheMainThread() {
     MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
     return mMonitor;
   }
 };
 
 // Initialized to nullptr by non-local static initialization.
 /* static */
@@ -2770,21 +2771,22 @@ nsresult MediaCacheStream::GetCachedRang
 }
 
 double MediaCacheStream::GetDownloadRate(bool* aIsReliable) {
   MOZ_ASSERT(!NS_IsMainThread());
   AutoLock lock(mMediaCache->Monitor());
   return mDownloadStatistics.GetRate(aIsReliable);
 }
 
-nsCString MediaCacheStream::GetDebugInfo() {
+void MediaCacheStream::GetDebugInfo(dom::MediaCacheStreamDebugInfo& aInfo) {
   AutoLock lock(mMediaCache->GetMonitorOnTheMainThread());
-  return nsPrintfCString("mStreamLength=%" PRId64 " mChannelOffset=%" PRId64
-                         " mCacheSuspended=%d mChannelEnded=%d mLoadID=%u",
-                         mStreamLength, mChannelOffset, mCacheSuspended,
-                         mChannelEnded, mLoadID);
+  aInfo.mStreamLength = mStreamLength;
+  aInfo.mChannelOffset = mChannelOffset;
+  aInfo.mCacheSuspended = mCacheSuspended;
+  aInfo.mChannelEnded = mChannelEnded;
+  aInfo.mLoadID = mLoadID;
 }
 
 }  // namespace mozilla
 
 // avoid redefined macro in unified build
 #undef LOG
 #undef LOGI
--- a/dom/media/MediaCache.h
+++ b/dom/media/MediaCache.h
@@ -6,16 +6,17 @@
 
 #ifndef MediaCache_h_
 #define MediaCache_h_
 
 #include "DecoderDoctorLogger.h"
 #include "Intervals.h"
 #include "mozilla/Result.h"
 #include "mozilla/UniquePtr.h"
+#include "mozilla/dom/MediaDebugInfoBinding.h"
 #include "nsCOMPtr.h"
 #include "nsHashKeys.h"
 #include "nsTArray.h"
 #include "nsTHashtable.h"
 
 #include "MediaChannelStatistics.h"
 
 class nsIEventTarget;
@@ -354,17 +355,17 @@ class MediaCacheStream : public DecoderD
   // 'Read' for argument and return details.
   nsresult ReadAt(int64_t aOffset, char* aBuffer, uint32_t aCount,
                   uint32_t* aBytes);
 
   void ThrottleReadahead(bool bThrottle);
 
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;
 
-  nsCString GetDebugInfo();
+  void GetDebugInfo(dom::MediaCacheStreamDebugInfo& aInfo);
 
  private:
   friend class MediaCache;
 
   /**
    * A doubly-linked list of blocks. Add/Remove/Get methods are all
    * constant time. We declare this here so that a stream can contain a
    * BlockList of its read-ahead blocks. Blocks are referred to by index
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -1319,77 +1319,46 @@ MediaDecoderOwner::NextFrameStatus Media
   auto currentPosition = CurrentPosition();
   media::TimeInterval interval(
       currentPosition, currentPosition + DEFAULT_NEXT_FRAME_AVAILABLE_BUFFERED);
   return GetBuffered().Contains(interval)
              ? MediaDecoderOwner::NEXT_FRAME_AVAILABLE
              : MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE;
 }
 
-nsCString MediaDecoder::GetDebugInfo() {
-  return nsPrintfCString(
-      "MediaDecoder=%p: channels=%u rate=%u hasAudio=%d hasVideo=%d "
-      "mPlayState=%s",
-      this, mInfo ? mInfo->mAudio.mChannels : 0,
-      mInfo ? mInfo->mAudio.mRate : 0, mInfo ? mInfo->HasAudio() : 0,
-      mInfo ? mInfo->HasVideo() : 0, PlayStateStr());
+void MediaDecoder::GetDebugInfo(dom::MediaDecoderDebugInfo& aInfo) {
+  aInfo.mInstance = NS_ConvertUTF8toUTF16(nsPrintfCString("%p", this));
+  aInfo.mChannels = mInfo ? mInfo->mAudio.mChannels : 0;
+  aInfo.mRate = mInfo ? mInfo->mAudio.mRate : 0;
+  aInfo.mHasAudio = mInfo ? mInfo->HasAudio() : false;
+  aInfo.mHasVideo = mInfo ? mInfo->HasVideo() : false;
+  aInfo.mPlayState = NS_ConvertUTF8toUTF16(PlayStateStr());
+  aInfo.mContainerType =
+      NS_ConvertUTF8toUTF16(ContainerType().Type().AsString());
+  mReader->GetDebugInfo(aInfo.mReader);
 }
 
-RefPtr<GenericPromise> MediaDecoder::DumpDebugInfo() {
+RefPtr<GenericPromise> MediaDecoder::RequestDebugInfo(
+    MediaDecoderDebugInfo& aInfo) {
   MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
-  nsCString str = GetDebugInfo();
-
-  nsAutoCString readerStr;
-  GetMozDebugReaderData(readerStr);
-  if (!readerStr.IsEmpty()) {
-    str += "\nreader data:\n";
-    str += readerStr;
-  }
+  GetDebugInfo(aInfo);
 
   if (!GetStateMachine()) {
-    DUMP("%s", str.get());
     return GenericPromise::CreateAndResolve(true, __func__);
   }
 
-  return GetStateMachine()->RequestDebugInfo()->Then(
-      SystemGroup::AbstractMainThreadFor(TaskCategory::Other), __func__,
-      [str](const nsACString& aString) {
-        DUMP("%s", str.get());
-        DUMP("%s", aString.Data());
-        return GenericPromise::CreateAndResolve(true, __func__);
-      },
-      [str]() {
-        DUMP("%s", str.get());
-        return GenericPromise::CreateAndResolve(true, __func__);
-      });
-}
-
-RefPtr<MediaDecoder::DebugInfoPromise> MediaDecoder::RequestDebugInfo() {
-  MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
-
-  auto str = GetDebugInfo();
-  if (!GetStateMachine()) {
-    return DebugInfoPromise::CreateAndResolve(str, __func__);
-  }
-
-  return GetStateMachine()->RequestDebugInfo()->Then(
-      SystemGroup::AbstractMainThreadFor(TaskCategory::Other), __func__,
-      [str](const nsACString& aString) {
-        nsCString result = str + nsCString("\n") + aString;
-        return DebugInfoPromise::CreateAndResolve(result, __func__);
-      },
-      [str]() { return DebugInfoPromise::CreateAndResolve(str, __func__); });
-}
-
-void MediaDecoder::GetMozDebugReaderData(nsACString& aString) {
-  aString += nsPrintfCString("Container Type: %s\n",
-                             ContainerType().Type().AsString().get());
-  if (mReader) {
-    mReader->GetMozDebugReaderData(aString);
-  }
+  return GetStateMachine()
+      ->RequestDebugInfo(aInfo.mStateMachine)
+      ->Then(
+          SystemGroup::AbstractMainThreadFor(TaskCategory::Other), __func__,
+          []() { return GenericPromise::CreateAndResolve(true, __func__); },
+          []() {
+            UNREACHABLE();
+            return GenericPromise::CreateAndResolve(false, __func__);
+          });
 }
 
 void MediaDecoder::NotifyAudibleStateChanged() {
   MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
   GetOwner()->SetAudibleState(mIsAudioDataAudible);
 }
 
 MediaMemoryTracker::MediaMemoryTracker() {}
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -21,16 +21,17 @@
 #  include "TimeUnits.h"
 #  include "TrackID.h"
 #  include "mozilla/Atomics.h"
 #  include "mozilla/CDMProxy.h"
 #  include "mozilla/MozPromise.h"
 #  include "mozilla/ReentrantMonitor.h"
 #  include "mozilla/StateMirroring.h"
 #  include "mozilla/StateWatching.h"
+#  include "mozilla/dom/MediaDebugInfoBinding.h"
 #  include "nsAutoPtr.h"
 #  include "nsCOMPtr.h"
 #  include "nsIObserver.h"
 #  include "nsISupports.h"
 #  include "nsITimer.h"
 
 class nsIPrincipal;
 
@@ -384,24 +385,19 @@ class MediaDecoder : public DecoderDocto
   }
 
   MediaDecoderOwner::NextFrameStatus NextFrameStatus() const {
     return mNextFrameStatus;
   }
 
   virtual MediaDecoderOwner::NextFrameStatus NextFrameBufferedStatus();
 
-  // Returns a string describing the state of the media player internal
-  // data. Used for debugging purposes.
-  virtual void GetMozDebugReaderData(nsACString& aString);
+  RefPtr<GenericPromise> RequestDebugInfo(dom::MediaDecoderDebugInfo& aInfo);
 
-  RefPtr<GenericPromise> DumpDebugInfo();
-
-  using DebugInfoPromise = MozPromise<nsCString, bool, true>;
-  RefPtr<DebugInfoPromise> RequestDebugInfo();
+  void GetDebugInfo(dom::MediaDecoderDebugInfo& aInfo);
 
  protected:
   virtual ~MediaDecoder();
 
   // Called when the first audio and/or video from the media file has been
   // loaded by the state machine. Call on the main thread only.
   virtual void FirstFrameLoaded(nsAutoPtr<MediaInfo> aInfo,
                                 MediaDecoderEventVisibility aEventVisibility);
@@ -465,18 +461,16 @@ class MediaDecoder : public DecoderDocto
   RefPtr<MediaFormatReader> mReader;
 
   // Amount of buffered data ahead of current time required to consider that
   // the next frame is available.
   // An arbitrary value of 250ms is used.
   static constexpr auto DEFAULT_NEXT_FRAME_AVAILABLE_BUFFERED =
       media::TimeUnit::FromMicroseconds(250000);
 
-  virtual nsCString GetDebugInfo();
-
  private:
   // Called when the owner's activity changed.
   void NotifyCompositor();
 
   void OnPlaybackErrorEvent(const MediaResult& aError);
 
   void OnDecoderDoctorEvent(DecoderDoctorEvent aEvent);
 
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -209,17 +209,18 @@ class MediaDecoderStateMachine::StateObj
   virtual RefPtr<ShutdownPromise> HandleShutdown();
 
   virtual void HandleVideoSuspendTimeout() = 0;
 
   virtual void HandleResumeVideoDecoding(const TimeUnit& aTarget);
 
   virtual void HandlePlayStateChanged(MediaDecoder::PlayState aPlayState) {}
 
-  virtual nsCString GetDebugInfo() { return nsCString(); }
+  virtual void GetDebugInfo(
+      MediaDecoderStateMachineDecodingStateDebugInfo& aInfo) {}
 
   virtual void HandleLoopingChanged() {}
 
  private:
   template <class S, typename R, typename... As>
   auto ReturnTypeHelper(R (S::*)(As...)) -> R;
 
   void Crash(const char* aReason, const char* aSite) {
@@ -631,18 +632,19 @@ class MediaDecoderStateMachine::Decoding
 
     if (aPlayState == MediaDecoder::PLAY_STATE_PAUSED) {
       StartDormantTimer();
     } else {
       mDormantTimer.Reset();
     }
   }
 
-  nsCString GetDebugInfo() override {
-    return nsPrintfCString("mIsPrerolling=%d", mIsPrerolling);
+  void GetDebugInfo(
+      MediaDecoderStateMachineDecodingStateDebugInfo& aInfo) override {
+    aInfo.mIsPrerolling = mIsPrerolling;
   }
 
   void HandleLoopingChanged() override { SetDecodingState(); }
 
  protected:
   virtual void EnsureAudioDecodeTaskQueued();
 
  private:
@@ -3695,52 +3697,46 @@ void MediaDecoderStateMachine::SetAudioC
 
 uint32_t MediaDecoderStateMachine::GetAmpleVideoFrames() const {
   MOZ_ASSERT(OnTaskQueue());
   return mReader->VideoIsHardwareAccelerated()
              ? std::max<uint32_t>(sVideoQueueHWAccelSize, MIN_VIDEO_QUEUE_SIZE)
              : std::max<uint32_t>(sVideoQueueDefaultSize, MIN_VIDEO_QUEUE_SIZE);
 }
 
-nsCString MediaDecoderStateMachine::GetDebugInfo() {
+void MediaDecoderStateMachine::GetDebugInfo(
+    dom::MediaDecoderStateMachineDebugInfo& aInfo) {
   MOZ_ASSERT(OnTaskQueue());
-  int64_t duration =
+  aInfo.mDuration =
       mDuration.Ref() ? mDuration.Ref().ref().ToMicroseconds() : -1;
-  auto str = nsPrintfCString(
-      "MDSM: duration=%" PRId64 " GetMediaTime=%" PRId64
-      " GetClock="
-      "%" PRId64
-      " mMediaSink=%p state=%s mPlayState=%d "
-      "mSentFirstFrameLoadedEvent=%d IsPlaying=%d mAudioStatus=%s "
-      "mVideoStatus=%s mDecodedAudioEndTime=%" PRId64
-      " mDecodedVideoEndTime=%" PRId64
-      " mAudioCompleted=%d "
-      "mVideoCompleted=%d %s",
-      duration, GetMediaTime().ToMicroseconds(),
-      mMediaSink->IsStarted() ? GetClock().ToMicroseconds() : -1,
-      mMediaSink.get(), ToStateStr(), mPlayState.Ref(),
-      mSentFirstFrameLoadedEvent, IsPlaying(), AudioRequestStatus(),
-      VideoRequestStatus(), mDecodedAudioEndTime.ToMicroseconds(),
-      mDecodedVideoEndTime.ToMicroseconds(), mAudioCompleted, mVideoCompleted,
-      mStateObj->GetDebugInfo().get());
-
-  AppendStringIfNotEmpty(str, mMediaSink->GetDebugInfo());
-
-  return std::move(str);
+  aInfo.mMediaTime = GetMediaTime().ToMicroseconds();
+  aInfo.mClock = mMediaSink->IsStarted() ? GetClock().ToMicroseconds() : -1;
+  aInfo.mPlayState = int32_t(mPlayState.Ref());
+  aInfo.mSentFirstFrameLoadedEvent = mSentFirstFrameLoadedEvent;
+  aInfo.mIsPlaying = IsPlaying();
+  aInfo.mAudioRequestStatus = NS_ConvertUTF8toUTF16(AudioRequestStatus());
+  aInfo.mVideoRequestStatus = NS_ConvertUTF8toUTF16(VideoRequestStatus());
+  aInfo.mDecodedAudioEndTime = mDecodedAudioEndTime.ToMicroseconds();
+  aInfo.mDecodedVideoEndTime = mDecodedVideoEndTime.ToMicroseconds();
+  aInfo.mAudioCompleted = mAudioCompleted;
+  aInfo.mVideoCompleted = mVideoCompleted;
+  mStateObj->GetDebugInfo(aInfo.mStateObj);
+  mMediaSink->GetDebugInfo(aInfo.mMediaSink);
 }
 
-RefPtr<MediaDecoder::DebugInfoPromise>
-MediaDecoderStateMachine::RequestDebugInfo() {
-  using PromiseType = MediaDecoder::DebugInfoPromise;
-  RefPtr<PromiseType::Private> p = new PromiseType::Private(__func__);
+RefPtr<GenericPromise> MediaDecoderStateMachine::RequestDebugInfo(
+    dom::MediaDecoderStateMachineDebugInfo& aInfo) {
+  RefPtr<GenericPromise::Private> p = new GenericPromise::Private(__func__);
   RefPtr<MediaDecoderStateMachine> self = this;
   nsresult rv = OwnerThread()->Dispatch(
-      NS_NewRunnableFunction(
-          "MediaDecoderStateMachine::RequestDebugInfo",
-          [self, p]() { p->Resolve(self->GetDebugInfo(), __func__); }),
+      NS_NewRunnableFunction("MediaDecoderStateMachine::RequestDebugInfo",
+                             [self, p, &aInfo]() {
+                               self->GetDebugInfo(aInfo);
+                               p->Resolve(true, __func__);
+                             }),
       AbstractThread::TailDispatch);
   MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
   Unused << rv;
   return p.forget();
 }
 
 void MediaDecoderStateMachine::SetOutputStreamPrincipal(
     const nsCOMPtr<nsIPrincipal>& aPrincipal) {
--- a/dom/media/MediaDecoderStateMachine.h
+++ b/dom/media/MediaDecoderStateMachine.h
@@ -91,16 +91,17 @@ hardware (via AudioStream).
 #  include "MediaQueue.h"
 #  include "MediaSink.h"
 #  include "MediaStatistics.h"
 #  include "MediaTimer.h"
 #  include "SeekJob.h"
 #  include "mozilla/Attributes.h"
 #  include "mozilla/ReentrantMonitor.h"
 #  include "mozilla/StateMirroring.h"
+#  include "mozilla/dom/MediaDebugInfoBinding.h"
 #  include "nsAutoPtr.h"
 #  include "nsThreadUtils.h"
 
 namespace mozilla {
 
 class AbstractThread;
 class AudioSegment;
 class DecodedStream;
@@ -177,17 +178,18 @@ class MediaDecoderStateMachine
     DECODER_STATE_BUFFERING,
     DECODER_STATE_COMPLETED,
     DECODER_STATE_SHUTDOWN
   };
 
   // Returns the state machine task queue.
   TaskQueue* OwnerThread() const { return mTaskQueue; }
 
-  RefPtr<MediaDecoder::DebugInfoPromise> RequestDebugInfo();
+  RefPtr<GenericPromise> RequestDebugInfo(
+      dom::MediaDecoderStateMachineDebugInfo& aInfo);
 
   void SetOutputStreamPrincipal(const nsCOMPtr<nsIPrincipal>& aPrincipal);
   void SetOutputStreamCORSMode(CORSMode aCORSMode);
   // If an OutputStreamManager does not exist, one will be created.
   void EnsureOutputStreamManager(MediaStreamGraph* aGraph);
   // If an OutputStreamManager exists, tracks matching aLoadedInfo will be
   // created unless they already exist in the manager.
   void EnsureOutputStreamManagerHasTracks(const MediaInfo& aLoadedInfo);
@@ -305,17 +307,17 @@ class MediaDecoderStateMachine
   class VideoOnlySeekingState;
   class BufferingState;
   class CompletedState;
   class ShutdownState;
 
   static const char* ToStateStr(State aState);
   const char* ToStateStr();
 
-  nsCString GetDebugInfo();
+  void GetDebugInfo(dom::MediaDecoderStateMachineDebugInfo& aInfo);
 
   // Functions used by assertions to ensure we're calling things
   // on the appropriate threads.
   bool OnTaskQueue() const;
 
   // Initialization that needs to happen on the task queue. This is the first
   // task that gets run on the task queue, and is dispatched from the MDSM
   // constructor immediately after the task queue is created.
--- a/dom/media/MediaFormatReader.cpp
+++ b/dom/media/MediaFormatReader.cpp
@@ -2948,17 +2948,17 @@ void MediaFormatReader::UpdateBuffered()
   }
 }
 
 layers::ImageContainer* MediaFormatReader::GetImageContainer() {
   return mVideoFrameContainer ? mVideoFrameContainer->GetImageContainer()
                               : nullptr;
 }
 
-void MediaFormatReader::GetMozDebugReaderData(nsACString& aString) {
+void MediaFormatReader::GetDebugInfo(dom::MediaFormatReaderDebugInfo& aInfo) {
   nsCString result;
   nsAutoCString audioDecoderName("unavailable");
   nsAutoCString videoDecoderName = audioDecoderName;
   nsAutoCString audioType("none");
   nsAutoCString videoType("none");
 
   AudioInfo audioInfo;
   {
@@ -2977,80 +2977,86 @@ void MediaFormatReader::GetMozDebugReade
     if (HasVideo()) {
       videoInfo = *mVideo.GetWorkingInfo()->GetAsVideoInfo();
       videoDecoderName = mVideo.mDecoder ? mVideo.mDecoder->GetDescriptionName()
                                          : mVideo.mDescription;
       videoType = videoInfo.mMimeType;
     }
   }
 
-  result += nsPrintfCString("Audio Decoder(%s, %u channels @ %0.1fkHz): %s\n",
-                            audioType.get(), audioInfo.mChannels,
-                            audioInfo.mRate / 1000.0f, audioDecoderName.get());
-  result += nsPrintfCString("Audio Frames Decoded: %" PRIu64 "\n",
-                            mAudio.mNumSamplesOutputTotal);
+  aInfo.mAudioDecoderName = NS_ConvertUTF8toUTF16(audioDecoderName);
+  aInfo.mAudioType = NS_ConvertUTF8toUTF16(audioType);
+  aInfo.mAudioChannels = audioInfo.mChannels;
+  aInfo.mAudioRate = audioInfo.mRate / 1000.0f;
+  aInfo.mAudioFramesDecoded = mAudio.mNumSamplesOutputTotal;
+
   if (HasAudio()) {
-    result += nsPrintfCString(
-        "Audio State: ni=%d no=%d wp=%d demuxr=%d demuxq=%u decoder=%d tt=%.1f "
-        "tths=%d in=%" PRIu64 " out=%" PRIu64
-        " qs=%u pending=%u wfd=%d eos=%d ds=%d wfk=%d sid=%u\n",
-        NeedInput(mAudio), mAudio.HasPromise(),
-        !mAudio.mWaitingPromise.IsEmpty(), mAudio.mDemuxRequest.Exists(),
-        uint32_t(mAudio.mQueuedSamples.Length()),
-        mAudio.mDecodeRequest.Exists(),
+    aInfo.mAudioState.mNeedInput = NeedInput(mAudio);
+    aInfo.mAudioState.mHasPromise = mAudio.HasPromise();
+    aInfo.mAudioState.mWaitingPromise = !mAudio.mWaitingPromise.IsEmpty();
+    aInfo.mAudioState.mHasDemuxRequest = mAudio.mDemuxRequest.Exists();
+    aInfo.mAudioState.mDemuxQueueSize =
+        uint32_t(mAudio.mQueuedSamples.Length());
+    aInfo.mAudioState.mHasDecoder = mAudio.mDecodeRequest.Exists();
+    aInfo.mAudioState.mTimeTreshold =
         mAudio.mTimeThreshold ? mAudio.mTimeThreshold.ref().Time().ToSeconds()
-                              : -1.0,
-        mAudio.mTimeThreshold ? mAudio.mTimeThreshold.ref().mHasSeeked : -1,
-        mAudio.mNumSamplesInput, mAudio.mNumSamplesOutput,
-        unsigned(size_t(mAudio.mSizeOfQueue)),
-        unsigned(mAudio.mOutput.Length()), mAudio.mWaitingForData,
-        mAudio.mDemuxEOS, int32_t(mAudio.mDrainState), mAudio.mWaitingForKey,
-        mAudio.mLastStreamSourceID);
+                              : -1.0;
+    aInfo.mAudioState.mTimeTresholdHasSeeked =
+        mAudio.mTimeThreshold ? mAudio.mTimeThreshold.ref().mHasSeeked : false;
+    aInfo.mAudioState.mNumSamplesInput = mAudio.mNumSamplesInput;
+    aInfo.mAudioState.mNumSamplesOutput = mAudio.mNumSamplesOutput;
+    aInfo.mAudioState.mQueueSize = size_t(mAudio.mSizeOfQueue);
+    aInfo.mAudioState.mPending = mAudio.mOutput.Length();
+    aInfo.mAudioState.mWaitingForData = mAudio.mWaitingForData;
+    aInfo.mAudioState.mDemuxEOS = mAudio.mDemuxEOS;
+    aInfo.mAudioState.mDrainState = int32_t(mAudio.mDrainState);
+    aInfo.mAudioState.mWaitingForKey = mAudio.mWaitingForKey;
+    aInfo.mAudioState.mLastStreamSourceID = mAudio.mLastStreamSourceID;
   }
 
-  result += nsPrintfCString(
-      "Video Decoder(%s, %dx%d @ %0.2f): %s\n", videoType.get(),
-      videoInfo.mDisplay.width < 0 ? 0 : videoInfo.mDisplay.width,
-      videoInfo.mDisplay.height < 0 ? 0 : videoInfo.mDisplay.height,
-      mVideo.mMeanRate.Mean(), videoDecoderName.get());
-
-  result +=
-      nsPrintfCString("Hardware Video Decoding: %s\n",
-                      VideoIsHardwareAccelerated() ? "enabled" : "disabled");
-  result += nsPrintfCString(
-      "Video Frames Decoded: %" PRIu64 " (skipped=%" PRIu64 ")\n",
-      mVideo.mNumSamplesOutputTotal, mVideo.mNumSamplesSkippedTotal);
+  aInfo.mVideoDecoderName = NS_ConvertUTF8toUTF16(videoDecoderName);
+  aInfo.mVideoType = NS_ConvertUTF8toUTF16(videoType);
+  aInfo.mVideoWidth =
+      videoInfo.mDisplay.width < 0 ? 0 : videoInfo.mDisplay.width;
+  aInfo.mVideoHeight =
+      videoInfo.mDisplay.height < 0 ? 0 : videoInfo.mDisplay.height;
+  aInfo.mVideoRate = mVideo.mMeanRate.Mean();
+  aInfo.mVideoHardwareAccelerated = VideoIsHardwareAccelerated();
+  aInfo.mVideoNumSamplesOutputTotal = mVideo.mNumSamplesOutputTotal;
+  aInfo.mVideoNumSamplesSkippedTotal = mVideo.mNumSamplesSkippedTotal;
+
   if (HasVideo()) {
-    result += nsPrintfCString(
-        "Video State: ni=%d no=%d wp=%d demuxr=%d demuxq=%u decoder=%d tt=%.1f "
-        "tths=%d in=%" PRIu64 " out=%" PRIu64
-        " qs=%u pending:%u wfd=%d eos=%d ds=%d wfk=%d sid=%u\n",
-        NeedInput(mVideo), mVideo.HasPromise(),
-        !mVideo.mWaitingPromise.IsEmpty(), mVideo.mDemuxRequest.Exists(),
-        uint32_t(mVideo.mQueuedSamples.Length()),
-        mVideo.mDecodeRequest.Exists(),
+    aInfo.mVideoState.mNeedInput = NeedInput(mVideo);
+    aInfo.mVideoState.mHasPromise = mVideo.HasPromise();
+    aInfo.mVideoState.mWaitingPromise = !mVideo.mWaitingPromise.IsEmpty();
+    aInfo.mVideoState.mHasDemuxRequest = mVideo.mDemuxRequest.Exists();
+    aInfo.mVideoState.mDemuxQueueSize =
+        uint32_t(mVideo.mQueuedSamples.Length());
+    aInfo.mVideoState.mHasDecoder = mVideo.mDecodeRequest.Exists();
+    aInfo.mVideoState.mTimeTreshold =
         mVideo.mTimeThreshold ? mVideo.mTimeThreshold.ref().Time().ToSeconds()
-                              : -1.0,
-        mVideo.mTimeThreshold ? mVideo.mTimeThreshold.ref().mHasSeeked : -1,
-        mVideo.mNumSamplesInput, mVideo.mNumSamplesOutput,
-        unsigned(size_t(mVideo.mSizeOfQueue)),
-        unsigned(mVideo.mOutput.Length()), mVideo.mWaitingForData,
-        mVideo.mDemuxEOS, int32_t(mVideo.mDrainState), mVideo.mWaitingForKey,
-        mVideo.mLastStreamSourceID);
+                              : -1.0;
+    aInfo.mVideoState.mTimeTresholdHasSeeked =
+        mVideo.mTimeThreshold ? mVideo.mTimeThreshold.ref().mHasSeeked : false;
+    aInfo.mVideoState.mNumSamplesInput = mVideo.mNumSamplesInput;
+    aInfo.mVideoState.mNumSamplesOutput = mVideo.mNumSamplesOutput;
+    aInfo.mVideoState.mQueueSize = size_t(mVideo.mSizeOfQueue);
+    aInfo.mVideoState.mPending = mVideo.mOutput.Length();
+    aInfo.mVideoState.mWaitingForData = mVideo.mWaitingForData;
+    aInfo.mVideoState.mDemuxEOS = mVideo.mDemuxEOS;
+    aInfo.mVideoState.mDrainState = int32_t(mVideo.mDrainState);
+    aInfo.mVideoState.mWaitingForKey = mVideo.mWaitingForKey;
+    aInfo.mVideoState.mLastStreamSourceID = mVideo.mLastStreamSourceID;
   }
 
-  // Looking at dropped frames in details.
+  // Looking at dropped frames
   FrameStatisticsData stats = mFrameStats->GetFrameStatisticsData();
-  result +=
-      nsPrintfCString("Dropped Frames: reader=%" PRIu64 " sink=%" PRIu64
-                      " compositor=%" PRIu64 "\n",
-                      stats.mDroppedDecodedFrames, stats.mDroppedSinkFrames,
-                      stats.mDroppedCompositorFrames);
-
-  aString += result;
+  aInfo.mFrameStats.mDroppedDecodedFrames = stats.mDroppedDecodedFrames;
+  aInfo.mFrameStats.mDroppedSinkFrames = stats.mDroppedSinkFrames;
+  aInfo.mFrameStats.mDroppedCompositorFrames = stats.mDroppedCompositorFrames;
 }
 
 void MediaFormatReader::SetVideoNullDecode(bool aIsNullDecode) {
   MOZ_ASSERT(OnTaskQueue());
   return SetNullDecode(TrackType::kVideoTrack, aIsNullDecode);
 }
 
 void MediaFormatReader::UpdateCompositor(
--- a/dom/media/MediaFormatReader.h
+++ b/dom/media/MediaFormatReader.h
@@ -8,16 +8,17 @@
 #  define MediaFormatReader_h_
 
 #  include "mozilla/Atomics.h"
 #  include "mozilla/Maybe.h"
 #  include "mozilla/Mutex.h"
 #  include "mozilla/StateMirroring.h"
 #  include "mozilla/StaticPrefs.h"
 #  include "mozilla/TaskQueue.h"
+#  include "mozilla/dom/MediaDebugInfoBinding.h"
 
 #  include "FrameStatistics.h"
 #  include "MediaEventSource.h"
 #  include "MediaDataDemuxer.h"
 #  include "MediaMetadataManager.h"
 #  include "MediaPromiseDefs.h"
 #  include "nsAutoPtr.h"
 #  include "PDMFactory.h"
@@ -173,19 +174,19 @@ class MediaFormatReader final
   // The MediaDecoderStateMachine uses various heuristics that assume that
   // raw media data is arriving sequentially from a network channel. This
   // makes sense in the <video src="foo"> case, but not for more advanced use
   // cases like MSE.
   bool UseBufferingHeuristics() const { return mTrackDemuxersMayBlock; }
 
   RefPtr<SetCDMPromise> SetCDMProxy(CDMProxy* aProxy);
 
-  // Returns a string describing the state of the decoder data.
+  // Returns a MediaDebugInfo structure
   // Used for debugging purposes.
-  void GetMozDebugReaderData(nsACString& aString);
+  void GetDebugInfo(dom::MediaFormatReaderDebugInfo& aInfo);
 
   // Switch the video decoder to NullDecoderModule. It might takes effective
   // since a few samples later depends on how much demuxed samples are already
   // queued in the original video decoder.
   void SetVideoNullDecode(bool aIsNullDecode);
 
   void UpdateCompositor(already_AddRefed<layers::KnowsCompositor>);
 
--- a/dom/media/mediasink/AudioSink.cpp
+++ b/dom/media/mediasink/AudioSink.cpp
@@ -493,19 +493,22 @@ uint32_t AudioSink::DrainConverter(uint3
       CreateAudioFromBuffer(std::move(convertedData), lastPacket);
   if (!data) {
     return 0;
   }
   mProcessedQueue.Push(data);
   return data->Frames();
 }
 
-nsCString AudioSink::GetDebugInfo() {
+void AudioSink::GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) {
   MOZ_ASSERT(mOwnerThread->IsCurrentThreadIn());
-  return nsPrintfCString(
-      "AudioSink: StartTime=%" PRId64 " LastGoodPosition=%" PRId64
-      " Playing=%d  OutputRate=%u Written=%" PRId64
-      " Errored=%d PlaybackComplete=%d",
-      mStartTime.ToMicroseconds(), mLastGoodPosition.ToMicroseconds(), mPlaying,
-      mOutputRate, mWritten, bool(mErrored), bool(mPlaybackComplete));
+  aInfo.mAudioSinkWrapper.mAudioSink.mStartTime = mStartTime.ToMicroseconds();
+  aInfo.mAudioSinkWrapper.mAudioSink.mLastGoodPosition =
+      mLastGoodPosition.ToMicroseconds();
+  aInfo.mAudioSinkWrapper.mAudioSink.mIsPlaying = mPlaying;
+  aInfo.mAudioSinkWrapper.mAudioSink.mOutputRate = mOutputRate;
+  aInfo.mAudioSinkWrapper.mAudioSink.mWritten = mWritten;
+  aInfo.mAudioSinkWrapper.mAudioSink.mHasErrored = bool(mErrored);
+  aInfo.mAudioSinkWrapper.mAudioSink.mPlaybackComplete =
+      bool(mPlaybackComplete);
 }
 
 }  // namespace mozilla
--- a/dom/media/mediasink/AudioSink.h
+++ b/dom/media/mediasink/AudioSink.h
@@ -52,17 +52,17 @@ class AudioSink : private AudioStream::D
 
   void SetVolume(double aVolume);
   void SetPlaybackRate(double aPlaybackRate);
   void SetPreservesPitch(bool aPreservesPitch);
   void SetPlaying(bool aPlaying);
 
   MediaEventSource<bool>& AudibleEvent() { return mAudibleEvent; }
 
-  nsCString GetDebugInfo();
+  void GetDebugInfo(dom::MediaSinkDebugInfo& aInfo);
 
  private:
   // Allocate and initialize mAudioStream. Returns NS_OK on success.
   nsresult InitializeAudioStream(const PlaybackParams& aParams);
 
   // Interface of AudioStream::DataSource.
   // Called on the callback thread of cubeb.
   UniquePtr<AudioStream::Chunk> PopFrames(uint32_t aFrames) override;
--- a/dom/media/mediasink/AudioSinkWrapper.cpp
+++ b/dom/media/mediasink/AudioSinkWrapper.cpp
@@ -217,20 +217,19 @@ void AudioSinkWrapper::OnAudioEnded() {
   mAudioSinkEndedPromise.Complete();
   mPlayDuration = GetPosition();
   if (!mPlayStartTime.IsNull()) {
     mPlayStartTime = TimeStamp::Now();
   }
   mAudioEnded = true;
 }
 
-nsCString AudioSinkWrapper::GetDebugInfo() {
+void AudioSinkWrapper::GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) {
   AssertOwnerThread();
-  auto str = nsPrintfCString(
-      "AudioSinkWrapper: IsStarted=%d IsPlaying=%d AudioEnded=%d", IsStarted(),
-      IsPlaying(), mAudioEnded);
+  aInfo.mAudioSinkWrapper.mIsPlaying = IsPlaying();
+  aInfo.mAudioSinkWrapper.mIsStarted = IsStarted();
+  aInfo.mAudioSinkWrapper.mAudioEnded = mAudioEnded;
   if (mAudioSink) {
-    AppendStringIfNotEmpty(str, mAudioSink->GetDebugInfo());
+    mAudioSink->GetDebugInfo(aInfo);
   }
-  return std::move(str);
 }
 
 }  // namespace mozilla
--- a/dom/media/mediasink/AudioSinkWrapper.h
+++ b/dom/media/mediasink/AudioSinkWrapper.h
@@ -72,17 +72,17 @@ class AudioSinkWrapper : public MediaSin
   nsresult Start(const media::TimeUnit& aStartTime,
                  const MediaInfo& aInfo) override;
   void Stop() override;
   bool IsStarted() const override;
   bool IsPlaying() const override;
 
   void Shutdown() override;
 
-  nsCString GetDebugInfo() override;
+  void GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) override;
 
  private:
   virtual ~AudioSinkWrapper();
 
   void AssertOwnerThread() const {
     MOZ_ASSERT(mOwnerThread->IsCurrentThreadIn());
   }
 
--- a/dom/media/mediasink/DecodedStream.cpp
+++ b/dom/media/mediasink/DecodedStream.cpp
@@ -236,27 +236,27 @@ void DecodedStreamTrackListener::NotifyE
 /*
  * All MediaStream-related data is protected by the decoder's monitor.
  * We have at most one DecodedStreamDaata per MediaDecoder. Its stream
  * is used as the input for each ProcessedMediaStream created by calls to
  * captureStream(UntilEnded). Seeking creates a new source stream, as does
  * replaying after the input as ended. In the latter case, the new source is
  * not connected to streams created by captureStreamUntilEnded.
  */
-class DecodedStreamData {
+class DecodedStreamData final {
  public:
   DecodedStreamData(
       OutputStreamManager* aOutputStreamManager, PlaybackInfoInit&& aInit,
       MozPromiseHolder<DecodedStream::EndedPromise>&& aAudioEndedPromise,
       MozPromiseHolder<DecodedStream::EndedPromise>&& aVideoEndedPromise,
       AbstractThread* aMainThread);
   ~DecodedStreamData();
   MediaEventSource<int64_t>& OnOutput();
   void Forget();
-  nsCString GetDebugInfo();
+  void GetDebugInfo(dom::DecodedStreamDataDebugInfo& aInfo);
 
   void WriteVideoToSegment(layers::Image* aImage, const TimeUnit& aStart,
                            const TimeUnit& aEnd,
                            const gfx::IntSize& aIntrinsicSize,
                            const TimeStamp& aTimeStamp, VideoSegment* aOutput,
                            const PrincipalHandle& aPrincipalHandle);
 
   /* The following group of fields are protected by the decoder's monitor
@@ -332,30 +332,29 @@ DecodedStreamData::DecodedStreamData(
 DecodedStreamData::~DecodedStreamData() { MOZ_ASSERT(NS_IsMainThread()); }
 
 MediaEventSource<int64_t>& DecodedStreamData::OnOutput() {
   return mListener->OnOutput();
 }
 
 void DecodedStreamData::Forget() { mListener->Forget(); }
 
-nsCString DecodedStreamData::GetDebugInfo() {
-  return nsPrintfCString(
-      "DecodedStreamData=%p mAudioFramesWritten=%" PRId64
-      " mStreamAudioWritten=%" PRId64 " mStreamVideoWritten=%" PRId64
-      " mNextAudioTime=%" PRId64 " mLastVideoStartTime=%" PRId64
-      " mLastVideoEndTime=%" PRId64
-      " mHaveSentFinishAudio=%d mHaveSentFinishVideo=%d",
-      this, mAudioFramesWritten, mStreamAudioWritten, mStreamVideoWritten,
-      mNextAudioTime.ToMicroseconds(),
+void DecodedStreamData::GetDebugInfo(dom::DecodedStreamDataDebugInfo& aInfo) {
+  aInfo.mInstance = NS_ConvertUTF8toUTF16(nsPrintfCString("%p", this));
+  aInfo.mAudioFramesWritten = mAudioFramesWritten;
+  aInfo.mStreamAudioWritten = mStreamAudioWritten;
+  aInfo.mNextAudioTime = mNextAudioTime.ToMicroseconds();
+  aInfo.mLastVideoStartTime =
       mLastVideoStartTime.valueOr(TimeUnit::FromMicroseconds(-1))
-          .ToMicroseconds(),
+          .ToMicroseconds();
+  aInfo.mLastVideoEndTime =
       mLastVideoEndTime.valueOr(TimeUnit::FromMicroseconds(-1))
-          .ToMicroseconds(),
-      mHaveSentFinishAudio, mHaveSentFinishVideo);
+          .ToMicroseconds();
+  aInfo.mHaveSentFinishAudio = mHaveSentFinishAudio;
+  aInfo.mHaveSentFinishVideo = mHaveSentFinishVideo;
 }
 
 DecodedStream::DecodedStream(AbstractThread* aOwnerThread,
                              AbstractThread* aMainThread,
                              MediaQueue<AudioData>& aAudioQueue,
                              MediaQueue<VideoData>& aVideoQueue,
                              OutputStreamManager* aOutputStreamManager,
                              const bool& aSameOrigin)
@@ -953,26 +952,26 @@ void DecodedStream::DisconnectListener()
   AssertOwnerThread();
 
   mAudioPushListener.Disconnect();
   mVideoPushListener.Disconnect();
   mAudioFinishListener.Disconnect();
   mVideoFinishListener.Disconnect();
 }
 
-nsCString DecodedStream::GetDebugInfo() {
+void DecodedStream::GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) {
   AssertOwnerThread();
   int64_t startTime = mStartTime.isSome() ? mStartTime->ToMicroseconds() : -1;
+  aInfo.mDecodedStream.mInstance =
+      NS_ConvertUTF8toUTF16(nsPrintfCString("%p", this));
+  aInfo.mDecodedStream.mStartTime = startTime;
+  aInfo.mDecodedStream.mLastOutputTime = mLastOutputTime.ToMicroseconds();
+  aInfo.mDecodedStream.mPlaying = mPlaying.Ref();
   auto lastAudio = mAudioQueue.PeekBack();
-  auto str = nsPrintfCString(
-      "DecodedStream=%p mStartTime=%" PRId64 " mLastOutputTime=%" PRId64
-      " mPlaying=%d AudioQueue(finished=%d size=%zu lastEndTime=%" PRId64
-      ") mData=%p",
-      this, startTime, mLastOutputTime.ToMicroseconds(), mPlaying.Ref(),
-      mAudioQueue.IsFinished(), mAudioQueue.GetSize(),
-      lastAudio ? lastAudio->GetEndTime().ToMicroseconds() : -1, mData.get());
+  aInfo.mDecodedStream.mLastAudio = lastAudio ? lastAudio->GetEndTime().ToMicroseconds() : -1;
+  aInfo.mDecodedStream.mAudioQueueFinished = mAudioQueue.IsFinished();
+  aInfo.mDecodedStream.mAudioQueueSize = mAudioQueue.GetSize();
   if (mData) {
-    AppendStringIfNotEmpty(str, mData->GetDebugInfo());
+    mData->GetDebugInfo(aInfo.mDecodedStream.mData);
   }
-  return std::move(str);
 }
 
 }  // namespace mozilla
--- a/dom/media/mediasink/DecodedStream.h
+++ b/dom/media/mediasink/DecodedStream.h
@@ -61,18 +61,17 @@ class DecodedStream : public MediaSink {
   void SetPlaying(bool aPlaying) override;
 
   nsresult Start(const media::TimeUnit& aStartTime,
                  const MediaInfo& aInfo) override;
   void Stop() override;
   bool IsStarted() const override;
   bool IsPlaying() const override;
   void Shutdown() override;
-
-  nsCString GetDebugInfo() override;
+  void GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) override;
 
  protected:
   virtual ~DecodedStream();
 
  private:
   void DestroyData(UniquePtr<DecodedStreamData>&& aData);
   void SendAudio(double aVolume, bool aIsSameOrigin,
                  const PrincipalHandle& aPrincipalHandle);
--- a/dom/media/mediasink/MediaSink.h
+++ b/dom/media/mediasink/MediaSink.h
@@ -6,16 +6,17 @@
 
 #ifndef MediaSink_h_
 #define MediaSink_h_
 
 #include "AudioDeviceInfo.h"
 #include "MediaInfo.h"
 #include "mozilla/MozPromise.h"
 #include "mozilla/RefPtr.h"
+#include "mozilla/dom/MediaDebugInfoBinding.h"
 #include "nsISupportsImpl.h"
 
 namespace mozilla {
 
 class TimeStamp;
 class VideoFrameContainer;
 
 /**
@@ -124,19 +125,17 @@ class MediaSink {
   // Called on the state machine thread to shut down the sink. All resources
   // allocated by this sink should be released.
   // Must be called after playback stopped.
   virtual void Shutdown() {}
 
   virtual void SetSecondaryVideoContainer(VideoFrameContainer* aSecondary) {}
   virtual void ClearSecondaryVideoContainer() {}
 
-  // Return a string containing debugging information.
-  // Can be called in any phase.
-  virtual nsCString GetDebugInfo() { return nsCString(); }
+  virtual void GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) {}
 
  protected:
   virtual ~MediaSink() = default;
 };
 
 }  // namespace mozilla
 
 #endif  // MediaSink_h_
--- a/dom/media/mediasink/VideoSink.cpp
+++ b/dom/media/mediasink/VideoSink.cpp
@@ -660,28 +660,27 @@ void VideoSink::SetSecondaryVideoContain
   }
 }
 
 void VideoSink::ClearSecondaryVideoContainer() {
   AssertOwnerThread();
   mSecondaryContainer = nullptr;
 }
 
-nsCString VideoSink::GetDebugInfo() {
+void VideoSink::GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) {
   AssertOwnerThread();
-  auto str = nsPrintfCString(
-      "VideoSink: IsStarted=%d IsPlaying=%d VideoQueue(finished=%d "
-      "size=%zu) mVideoFrameEndTime=%" PRId64
-      " mHasVideo=%d "
-      "mVideoSinkEndRequest.Exists()=%d mEndPromiseHolder.IsEmpty()=%d",
-      IsStarted(), IsPlaying(), VideoQueue().IsFinished(),
-      VideoQueue().GetSize(), mVideoFrameEndTime.ToMicroseconds(), mHasVideo,
-      mVideoSinkEndRequest.Exists(), mEndPromiseHolder.IsEmpty());
-  AppendStringIfNotEmpty(str, mAudioSink->GetDebugInfo());
-  return std::move(str);
+  aInfo.mVideoSink.mIsStarted = IsStarted();
+  aInfo.mVideoSink.mIsPlaying = IsPlaying();
+  aInfo.mVideoSink.mFinished = VideoQueue().IsFinished();
+  aInfo.mVideoSink.mSize = VideoQueue().GetSize();
+  aInfo.mVideoSink.mVideoFrameEndTime = mVideoFrameEndTime.ToMicroseconds();
+  aInfo.mVideoSink.mHasVideo = mHasVideo;
+  aInfo.mVideoSink.mVideoSinkEndRequestExists = mVideoSinkEndRequest.Exists();
+  aInfo.mVideoSink.mEndPromiseHolderIsEmpty = mEndPromiseHolder.IsEmpty();
+  mAudioSink->GetDebugInfo(aInfo);
 }
 
 bool VideoSink::InitializeBlankImage() {
   mBlankImage = mContainer->GetImageContainer()->CreatePlanarYCbCrImage();
   if (mBlankImage == nullptr) {
     return false;
   }
   SetImageToGreenPixel(mBlankImage->AsPlanarYCbCrImage());
--- a/dom/media/mediasink/VideoSink.h
+++ b/dom/media/mediasink/VideoSink.h
@@ -62,17 +62,17 @@ class VideoSink : public MediaSink {
 
   bool IsPlaying() const override;
 
   void Shutdown() override;
 
   void SetSecondaryVideoContainer(VideoFrameContainer* aSecondary) override;
   void ClearSecondaryVideoContainer() override;
 
-  nsCString GetDebugInfo() override;
+  void GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) override;
 
  private:
   virtual ~VideoSink();
 
   // VideoQueue listener related.
   void OnVideoQueuePushed(RefPtr<VideoData>&& aSample);
   void OnVideoQueueFinished();
   void ConnectListener();
--- a/dom/media/mediasource/MediaSource.cpp
+++ b/dom/media/mediasource/MediaSource.cpp
@@ -610,20 +610,32 @@ void MediaSource::DurationChange(double 
   //    4.1 Update new duration to equal highest end time.
   aNewDuration = std::max(aNewDuration, highestEndTime);
 
   // 5. Update the media duration to new duration and run the HTMLMediaElement
   // duration change algorithm.
   mDecoder->SetMediaSourceDuration(aNewDuration);
 }
 
-void MediaSource::GetMozDebugReaderData(nsAString& aString) {
-  nsAutoCString result;
-  mDecoder->GetMozDebugReaderData(result);
-  aString = NS_ConvertUTF8toUTF16(result);
+already_AddRefed<Promise> MediaSource::MozDebugReaderData(ErrorResult& aRv) {
+  // Creating a JS promise
+  nsPIDOMWindowInner* win = GetOwner();
+  if (!win) {
+    aRv.Throw(NS_ERROR_UNEXPECTED);
+    return nullptr;
+  }
+  RefPtr<Promise> domPromise = Promise::Create(win->AsGlobal(), aRv);
+  if (NS_WARN_IF(aRv.Failed())) {
+    return nullptr;
+  }
+  MOZ_ASSERT(domPromise);
+  MediaSourceDecoderDebugInfo info;
+  mDecoder->GetDebugInfo(info);
+  domPromise->MaybeResolve(info);
+  return domPromise.forget();
 }
 
 nsPIDOMWindowInner* MediaSource::GetParentObject() const { return GetOwner(); }
 
 JSObject* MediaSource::WrapObject(JSContext* aCx,
                                   JS::Handle<JSObject*> aGivenProto) {
   return MediaSource_Binding::Wrap(aCx, this, aGivenProto);
 }
--- a/dom/media/mediasource/MediaSource.h
+++ b/dom/media/mediasource/MediaSource.h
@@ -109,19 +109,19 @@ class MediaSource final : public DOMEven
   // Set mReadyState to aState and fire the required events at the MediaSource.
   void SetReadyState(MediaSourceReadyState aState);
 
   // Used by SourceBuffer to call CreateSubDecoder.
   MediaSourceDecoder* GetDecoder() { return mDecoder; }
 
   nsIPrincipal* GetPrincipal() { return mPrincipal; }
 
-  // Returns a string describing the state of the MediaSource internal
+  // Returns a structure describing the state of the MediaSource internal
   // buffered data. Used for debugging purposes.
-  void GetMozDebugReaderData(nsAString& aString);
+  already_AddRefed<Promise> MozDebugReaderData(ErrorResult& aRv);
 
   bool HasLiveSeekableRange() const { return mLiveSeekableRange.isSome(); }
   media::TimeInterval LiveSeekableRange() const {
     return mLiveSeekableRange.value();
   }
 
   AbstractThread* AbstractMainThread() const { return mAbstractMainThread; }
 
--- a/dom/media/mediasource/MediaSourceDecoder.cpp
+++ b/dom/media/mediasource/MediaSourceDecoder.cpp
@@ -230,21 +230,20 @@ void MediaSourceDecoder::SetMediaSourceD
       checkedDuration = INT64_MAX - 1;
     }
     SetExplicitDuration(aDuration);
   } else {
     SetExplicitDuration(PositiveInfinity<double>());
   }
 }
 
-void MediaSourceDecoder::GetMozDebugReaderData(nsACString& aString) {
-  aString += NS_LITERAL_CSTRING("Container Type: MediaSource\n");
+void MediaSourceDecoder::GetDebugInfo(dom::MediaSourceDecoderDebugInfo& aInfo) {
   if (mReader && mDemuxer) {
-    mReader->GetMozDebugReaderData(aString);
-    mDemuxer->GetMozDebugReaderData(aString);
+    mReader->GetDebugInfo(aInfo.mReader);
+    mDemuxer->GetDebugInfo(aInfo.mDemuxer);
   }
 }
 
 double MediaSourceDecoder::GetDuration() {
   MOZ_ASSERT(NS_IsMainThread());
   AbstractThread::AutoEnter context(AbstractMainThread());
   return ExplicitDuration();
 }
--- a/dom/media/mediasource/MediaSourceDecoder.h
+++ b/dom/media/mediasource/MediaSourceDecoder.h
@@ -4,16 +4,17 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MOZILLA_MEDIASOURCEDECODER_H_
 #define MOZILLA_MEDIASOURCEDECODER_H_
 
 #include "MediaDecoder.h"
 #include "mozilla/RefPtr.h"
+#include "mozilla/dom/MediaDebugInfoBinding.h"
 
 namespace mozilla {
 
 class MediaDecoderStateMachine;
 class MediaSourceDemuxer;
 
 namespace dom {
 
@@ -46,19 +47,19 @@ class MediaSourceDecoder : public MediaD
   void SetMediaSourceDuration(double aDuration);
 
   MediaSourceDemuxer* GetDemuxer() { return mDemuxer; }
 
   already_AddRefed<nsIPrincipal> GetCurrentPrincipal() override;
 
   bool IsTransportSeekable() override { return true; }
 
-  // Returns a string describing the state of the MediaSource internal
+  // Fills aInfo with a description of the state of the MediaSource internal
   // buffered data. Used for debugging purposes.
-  void GetMozDebugReaderData(nsACString& aString) override;
+  void GetDebugInfo(dom::MediaSourceDecoderDebugInfo& aInfo);
 
   void AddSizeOfResources(ResourceSizes* aSizes) override;
 
   MediaDecoderOwner::NextFrameStatus NextFrameBufferedStatus() override;
 
   bool IsMSE() const override { return true; }
 
   void NotifyInitDataArrived();
--- a/dom/media/mediasource/MediaSourceDemuxer.cpp
+++ b/dom/media/mediasource/MediaSourceDemuxer.cpp
@@ -225,59 +225,24 @@ RefPtr<TrackBuffersManager> MediaSourceD
       return nullptr;
   }
 }
 
 MediaSourceDemuxer::~MediaSourceDemuxer() {
   mInitPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
 }
 
-void MediaSourceDemuxer::GetMozDebugReaderData(nsACString& aString) {
+void MediaSourceDemuxer::GetDebugInfo(dom::MediaSourceDemuxerDebugInfo& aInfo) {
   MonitorAutoLock mon(mMonitor);
-  nsAutoCString result;
-  result += nsPrintfCString("Dumping Data for Demuxer: %p\n", this);
   if (mAudioTrack) {
-    result += nsPrintfCString(
-        "\tDumping Audio Track Buffer(%s): mLastAudioTime=%f\n"
-        "\t\tAudio Track Buffer Details: NumSamples=%zu"
-        " Size=%u Evictable=%u "
-        "NextGetSampleIndex=%u NextInsertionIndex=%d\n",
-        mAudioTrack->mType.Type().AsString().get(),
-        mAudioTrack->mAudioTracks.mNextSampleTime.ToSeconds(),
-        mAudioTrack->mAudioTracks.mBuffers[0].Length(),
-        mAudioTrack->mAudioTracks.mSizeBuffer,
-        mAudioTrack->Evictable(TrackInfo::kAudioTrack),
-        mAudioTrack->mAudioTracks.mNextGetSampleIndex.valueOr(-1),
-        mAudioTrack->mAudioTracks.mNextInsertionIndex.valueOr(-1));
-
-    result += nsPrintfCString(
-        "\t\tAudio Track Buffered: ranges=%s\n",
-        DumpTimeRanges(mAudioTrack->SafeBuffered(TrackInfo::kAudioTrack))
-            .get());
+    mAudioTrack->GetDebugInfo(aInfo.mAudioTrack);
   }
   if (mVideoTrack) {
-    result += nsPrintfCString(
-        "\tDumping Video Track Buffer(%s): mLastVideoTime=%f\n"
-        "\t\tVideo Track Buffer Details: NumSamples=%zu"
-        " Size=%u Evictable=%u "
-        "NextGetSampleIndex=%u NextInsertionIndex=%d\n",
-        mVideoTrack->mType.Type().AsString().get(),
-        mVideoTrack->mVideoTracks.mNextSampleTime.ToSeconds(),
-        mVideoTrack->mVideoTracks.mBuffers[0].Length(),
-        mVideoTrack->mVideoTracks.mSizeBuffer,
-        mVideoTrack->Evictable(TrackInfo::kVideoTrack),
-        mVideoTrack->mVideoTracks.mNextGetSampleIndex.valueOr(-1),
-        mVideoTrack->mVideoTracks.mNextInsertionIndex.valueOr(-1));
-
-    result += nsPrintfCString(
-        "\t\tVideo Track Buffered: ranges=%s\n",
-        DumpTimeRanges(mVideoTrack->SafeBuffered(TrackInfo::kVideoTrack))
-            .get());
+    mVideoTrack->GetDebugInfo(aInfo.mVideoTrack);
   }
-  aString += result;
 }
 
 MediaSourceTrackDemuxer::MediaSourceTrackDemuxer(MediaSourceDemuxer* aParent,
                                                  TrackInfo::TrackType aType,
                                                  TrackBuffersManager* aManager)
     : mParent(aParent),
       mType(aType),
       mMonitor("MediaSourceTrackDemuxer"),
--- a/dom/media/mediasource/MediaSourceDemuxer.h
+++ b/dom/media/mediasource/MediaSourceDemuxer.h
@@ -10,16 +10,17 @@
 #  include "MediaDataDemuxer.h"
 #  include "MediaResource.h"
 #  include "MediaSource.h"
 #  include "TrackBuffersManager.h"
 #  include "mozilla/Atomics.h"
 #  include "mozilla/Maybe.h"
 #  include "mozilla/Monitor.h"
 #  include "mozilla/TaskQueue.h"
+#  include "mozilla/dom/MediaDebugInfoBinding.h"
 
 namespace mozilla {
 
 class AbstractThread;
 class MediaResult;
 class MediaSourceTrackDemuxer;
 
 DDLoggedTypeDeclNameAndBase(MediaSourceDemuxer, MediaDataDemuxer);
@@ -44,19 +45,19 @@ class MediaSourceDemuxer : public MediaD
   bool ShouldComputeStartTime() const override { return false; }
 
   /* interface for TrackBuffersManager */
   void AttachSourceBuffer(RefPtr<TrackBuffersManager>& aSourceBuffer);
   void DetachSourceBuffer(RefPtr<TrackBuffersManager>& aSourceBuffer);
   TaskQueue* GetTaskQueue() { return mTaskQueue; }
   void NotifyInitDataArrived();
 
-  // Returns a string describing the state of the MediaSource internal
+  // Fills aInfo with a description of the state of the MediaSource internal
   // buffered data. Used for debugging purposes.
-  void GetMozDebugReaderData(nsACString& aString);
+  void GetDebugInfo(dom::MediaSourceDemuxerDebugInfo& aInfo);
 
   void AddSizeOfResources(MediaSourceDecoder::ResourceSizes* aSizes);
 
   // Gap allowed between frames.
   // Due to inaccuracies in determining buffer end
   // frames (Bug 1065207). This value is based on videos seen in the wild.
   static constexpr media::TimeUnit EOS_FUZZ =
       media::TimeUnit::FromMicroseconds(500000);
--- a/dom/media/mediasource/TrackBuffersManager.cpp
+++ b/dom/media/mediasource/TrackBuffersManager.cpp
@@ -2761,16 +2761,61 @@ void TrackBuffersManager::TrackData::Add
     MediaSourceDecoder::ResourceSizes* aSizes) const {
   for (const TrackBuffer& buffer : mBuffers) {
     for (const MediaRawData* data : buffer) {
       aSizes->mByteSize += data->SizeOfIncludingThis(aSizes->mMallocSizeOf);
     }
   }
 }
 
+void TrackBuffersManager::GetDebugInfo(
+    dom::TrackBuffersManagerDebugInfo& aInfo) {
+  aInfo.mType = NS_ConvertUTF8toUTF16(mType.Type().AsString());
+
+  if (HasAudio()) {
+    aInfo.mNextSampleTime = mAudioTracks.mNextSampleTime.ToSeconds();
+    aInfo.mNumSamples = mAudioTracks.mBuffers[0].Length();
+    aInfo.mBufferSize = mAudioTracks.mSizeBuffer;
+    aInfo.mEvictable = Evictable(TrackInfo::kAudioTrack);
+    aInfo.mNextGetSampleIndex = mAudioTracks.mNextGetSampleIndex.valueOr(-1);
+    aInfo.mNextInsertionIndex = mAudioTracks.mNextInsertionIndex.valueOr(-1);
+    media::TimeIntervals ranges = SafeBuffered(TrackInfo::kAudioTrack);
+    dom::Sequence<dom::BufferRange> items;
+    for (uint32_t i = 0; i < ranges.Length(); ++i) {
+      // dom::Sequence is a FallibleTArray
+      dom::BufferRange* range = items.AppendElement(fallible);
+      if (!range) {
+        break;
+      }
+      range->mStart = ranges.Start(i).ToSeconds();
+      range->mEnd = ranges.End(i).ToSeconds();
+    }
+    aInfo.mRanges = items;
+  } else if (HasVideo()) {
+    aInfo.mNextSampleTime = mVideoTracks.mNextSampleTime.ToSeconds();
+    aInfo.mNumSamples = mVideoTracks.mBuffers[0].Length();
+    aInfo.mBufferSize = mVideoTracks.mSizeBuffer;
+    aInfo.mEvictable = Evictable(TrackInfo::kVideoTrack);
+    aInfo.mNextGetSampleIndex = mVideoTracks.mNextGetSampleIndex.valueOr(-1);
+    aInfo.mNextInsertionIndex = mVideoTracks.mNextInsertionIndex.valueOr(-1);
+    media::TimeIntervals ranges = SafeBuffered(TrackInfo::kVideoTrack);
+    dom::Sequence<dom::BufferRange> items;
+    for (uint32_t i = 0; i < ranges.Length(); ++i) {
+      // dom::Sequence is a FallibleTArray
+      dom::BufferRange* range = items.AppendElement(fallible);
+      if (!range) {
+        break;
+      }
+      range->mStart = ranges.Start(i).ToSeconds();
+      range->mEnd = ranges.End(i).ToSeconds();
+    }
+    aInfo.mRanges = items;
+  }
+}
+
 void TrackBuffersManager::AddSizeOfResources(
     MediaSourceDecoder::ResourceSizes* aSizes) const {
   MOZ_ASSERT(OnTaskQueue());
   mVideoTracks.AddSizeOfResources(aSizes);
   mAudioTracks.AddSizeOfResources(aSizes);
 }
 
 }  // namespace mozilla
--- a/dom/media/mediasource/TrackBuffersManager.h
+++ b/dom/media/mediasource/TrackBuffersManager.h
@@ -7,16 +7,17 @@
 #ifndef MOZILLA_TRACKBUFFERSMANAGER_H_
 #define MOZILLA_TRACKBUFFERSMANAGER_H_
 
 #include "mozilla/Atomics.h"
 #include "mozilla/Maybe.h"
 #include "mozilla/Mutex.h"
 #include "mozilla/NotNull.h"
 #include "mozilla/TaskQueue.h"
+#include "mozilla/dom/MediaDebugInfoBinding.h"
 
 #include "MediaContainerType.h"
 #include "MediaData.h"
 #include "MediaDataDemuxer.h"
 #include "MediaResult.h"
 #include "MediaSourceDecoder.h"
 #include "SourceBufferTask.h"
 #include "TimeUnits.h"
@@ -56,17 +57,17 @@ class SourceBufferTaskQueue {
   }
 
  private:
   nsTArray<RefPtr<SourceBufferTask>> mQueue;
 };
 
 DDLoggedTypeDeclName(TrackBuffersManager);
 
-class TrackBuffersManager
+class TrackBuffersManager final
     : public DecoderDoctorLifeLogger<TrackBuffersManager> {
  public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackBuffersManager);
 
   enum class EvictDataResult : int8_t {
     NO_DATA_EVICTED,
     CANT_EVICT,
     BUFFER_FULL,
@@ -160,23 +161,22 @@ class TrackBuffersManager
   // timecode or is empty.
   nsresult SetNextGetSampleIndexIfNeeded(TrackInfo::TrackType aTrack,
                                          const media::TimeUnit& aFuzz);
 
   media::TimeUnit GetNextRandomAccessPoint(TrackInfo::TrackType aTrack,
                                            const media::TimeUnit& aFuzz);
 
   void AddSizeOfResources(MediaSourceDecoder::ResourceSizes* aSizes) const;
+  void GetDebugInfo(dom::TrackBuffersManagerDebugInfo& aInfo);
 
  private:
   typedef MozPromise<bool, MediaResult, /* IsExclusive = */ true>
       CodedFrameProcessingPromise;
 
-  // for MediaSourceDemuxer::GetMozDebugReaderData
-  friend class MediaSourceDemuxer;
   ~TrackBuffersManager();
   // All following functions run on the taskqueue.
   RefPtr<AppendPromise> DoAppendData(already_AddRefed<MediaByteBuffer> aData,
                                      const SourceBufferAttributes& aAttributes);
   void ScheduleSegmentParserLoop();
   void SegmentParserLoop();
   void InitializationSegmentReceived();
   void ShutdownDemuxers();
--- a/dom/media/test/marionette/yttest/support.py
+++ b/dom/media/test/marionette/yttest/support.py
@@ -89,21 +89,20 @@ class VideoStreamTestCase(MarionetteTest
             with using_page(video_id, self.marionette, **options) as page:
                 yield page
 
     def assertVideoQuality(self, res):
         self.assertTrue(res is not None, "We did not get back the results")
         debug_info = res["mozRequestDebugInfo"]
 
         # looking at mNumSamplesOutputTotal vs mNumSamplesSkippedTotal
-        decoded, skipped = debug_info["Video Frames Decoded"].split(" ", 1)
-        decoded = int(decoded)
-        skipped = int(skipped.split("=")[-1][:-1])
-        self.assertLess(skipped, decoded * 0.04)
+        reader_info = debug_info["decoder"]["reader"]
+        self.assertLess(reader_info["videoNumSamplesSkippedTotal"],
+                        reader_info["videoNumSamplesOutputTotal"] * 0.04)
 
         # extracting in/out from the debugInfo
-        video_state = debug_info["Video State"]
-        video_in = int(video_state["in"])
-        video_out = int(video_state["out"])
+        video_state = reader_info["videoState"]
+        video_in = video_state["numSamplesInput"]
+        video_out = video_state["numSamplesOutput"]
         # what's the ratio ? we want 99%+
         if video_out != video_in:
             in_out_ratio = float(video_out) / float(video_in) * 100
             self.assertGreater(in_out_ratio, 99.0)
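
With the debug information exposed as a plain nested dictionary, further marionette assertions can address any field declared in MediaDebugInfo.webidl directly instead of parsing strings. Below is a minimal sketch of an additional check against the frame statistics; it assumes the resolved dictionary keeps the camelCase member names from the WebIDL, and the assert_few_dropped_frames helper is illustrative rather than part of this patch.

    def assert_few_dropped_frames(self, debug_info, max_ratio=0.04):
        # Frame counters come from MediaFormatReaderDebugInfo.frameStats.
        reader_info = debug_info["decoder"]["reader"]
        stats = reader_info["frameStats"]
        decoded = reader_info["videoNumSamplesOutputTotal"]
        dropped = (stats["droppedDecodedFrames"]
                   + stats["droppedSinkFrames"]
                   + stats["droppedCompositorFrames"])
        # Tolerate only a small fraction of dropped frames, mirroring the
        # skipped-frames check in assertVideoQuality above.
        self.assertLess(dropped, decoded * max_ratio)
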
--- a/dom/media/test/marionette/yttest/ytpage.py
+++ b/dom/media/test/marionette/yttest/ytpage.py
@@ -3,17 +3,16 @@
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Drives the browser during the playback test.
 """
 import contextlib
 import os
 import time
 import json
-import re
 
 from marionette_driver.by import By
 
 
 here = os.path.dirname(__file__)
 js = os.path.join(here, "until_end_test.js")
 with open(js) as f:
     UNTIL_END_TEST = f.read()
@@ -81,57 +80,25 @@ class YoutubePage:
         options.update(self.options)
         if "duration" in options:
             script = DURATION_TEST % options
         else:
             script = UNTIL_END_TEST % options
         res = self.execute_async_script(script)
         if res is None:
             return res
-        res = self._parse_res(res)
         self._dump_res(res)
         return res
 
     def execute_async_script(self, script, context=None):
         if context is None:
             context = self.marionette.CONTEXT_CONTENT
         with self.marionette.using_context(context):
             return self.marionette.execute_async_script(script, sandbox="system")
 
-    def _parse_res(self, res):
-        debug_info = {}
-        # The parsing won't be necessary once we have bug 1542674
-        for key, value in res["mozRequestDebugInfo"].items():
-            key, value = key.strip(), value.strip()
-            if key.startswith(SPLIT_FIELD):
-                value_dict = {}
-                for field in re.findall(r"\S+\(.+\)\s|\S+", value):
-                    field = field.strip()
-                    if field == "":
-                        continue
-                    if field.startswith("VideoQueue"):
-                        k = "VideoQueue"
-                        v = field[len("VideoQueue(") : -2]  # noqa: E203
-                        fields = {}
-                        v = v.split(" ")
-                        for h in v:
-                            f, vv = h.split("=")
-                            fields[f] = vv
-                        v = fields
-                    else:
-                        if "=" in field:
-                            k, v = field.split("=", 1)
-                        else:
-                            k, v = field.split(":", 1)
-                    value_dict[k] = v
-                value = value_dict
-            debug_info[key] = value
-        res["mozRequestDebugInfo"] = debug_info
-        return res
-
     def _dump_res(self, res):
         raw = json.dumps(res, indent=2, sort_keys=True)
         print(raw)
         if "upload_dir" in self.options:
             fn = "%s-videoPlaybackQuality.json" % self.video_id
             fn = os.path.join(self.options["upload_dir"], fn)
             # dumping on disk
             with open(fn, "w") as f:
--- a/dom/webidl/HTMLMediaElement.webidl
+++ b/dom/webidl/HTMLMediaElement.webidl
@@ -97,31 +97,26 @@ interface HTMLMediaElement : HTMLElement
                          optional DOMString label = "",
                          optional DOMString language = "");
 };
 
 // Mozilla extensions:
 partial interface HTMLMediaElement {
   [Func="HasDebuggerOrTabsPrivilege"]
   readonly attribute MediaSource? mozMediaSourceObject;
-  [Func="HasDebuggerOrTabsPrivilege"]
-  readonly attribute DOMString mozDebugReaderData;
+
   [Func="HasDebuggerOrTabsPrivilege", NewObject]
-  Promise<DOMString> mozRequestDebugInfo();
+  Promise<HTMLMediaElementDebugInfo> mozRequestDebugInfo();
 
   [Func="HasDebuggerOrTabsPrivilege", NewObject]
   static void mozEnableDebugLog();
   [Func="HasDebuggerOrTabsPrivilege", NewObject]
   Promise<DOMString> mozRequestDebugLog();
 
-  [Pref="media.test.dumpDebugInfo"]
-  Promise<void> mozDumpDebugInfo();
-
   attribute MediaStream? srcObject;
-
   attribute boolean mozPreservesPitch;
 
   // NB: for internal use with the video controls:
   [Func="IsChromeOrXBLOrUAWidget"] attribute boolean mozAllowCasting;
   [Func="IsChromeOrXBLOrUAWidget"] attribute boolean mozIsCasting;
 
   // Mozilla extension: stream capture
   [Throws]
new file mode 100644
--- /dev/null
+++ b/dom/webidl/MediaDebugInfo.webidl
@@ -0,0 +1,235 @@
+/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+
+/*
+ * This module defines the dictionaries that are filled with debug information
+ * through GetDebugInfo() calls in the media component. The information is
+ * requested through two promise-returning methods, one on HTMLMediaElement and
+ * one on MediaSource.
+ *
+ * If you need to expose extra information, there is one dictionary per class,
+ * following the pattern <ClassName>DebugInfo; add your fields there and fill
+ * them in the corresponding GetDebugInfo() call.
+ *
+ * The structures returned are listed below.
+ *
+ * Used by HTMLMediaElement.mozRequestDebugInfo(), see HTMLMediaElement.webidl:
+ *
+ * HTMLMediaElementDebugInfo
+ *   EMEDebugInfo
+ *   MediaDecoderDebugInfo
+ *     MediaFormatReaderDebugInfo
+ *       MediaStateDebugInfo
+ *       MediaStateDebugInfo
+ *       MediaFrameStats
+ *     MediaDecoderStateMachineDebugInfo
+ *       MediaDecoderStateMachineDecodingStateDebugInfo
+ *       MediaSinkDebugInfo
+ *         VideoSinkDebugInfo
+ *         AudioSinkDebugInfo
+ *         DecodedStreamDebugInfo
+ *           DecodedStreamDataDebugInfo
+ *     MediaResourceDebugInfo
+ *       MediaCacheStreamDebugInfo
+ *
+ * Used by MediaSource.mozDebugReaderData(), see MediaSource.webidl:
+ *
+ * MediaSourceDecoderDebugInfo
+ *  MediaFormatReaderDebugInfo
+ *    MediaStateDebugInfo
+ *    MediaStateDebugInfo
+ *    MediaFrameStats
+ *  MediaSourceDemuxerDebugInfo
+ *    TrackBuffersManagerDebugInfo
+ *    TrackBuffersManagerDebugInfo
+ */
+dictionary MediaCacheStreamDebugInfo {
+  long long streamLength = 0;
+  long long channelOffset = 0;
+  boolean cacheSuspended = false;
+  boolean channelEnded = false;
+  long loadID = 0;
+};
+
+dictionary MediaResourceDebugInfo {
+  required MediaCacheStreamDebugInfo cacheStream;
+};
+
+dictionary MediaDecoderDebugInfo {
+  DOMString instance = "";
+  unsigned long channels = 0;
+  unsigned long rate = 0;
+  boolean hasAudio = false;
+  boolean hasVideo = false;
+  DOMString PlayState = "";
+  DOMString containerType = "";
+  required MediaFormatReaderDebugInfo reader;
+  required MediaDecoderStateMachineDebugInfo stateMachine;
+  required MediaResourceDebugInfo resource;
+};
+
+dictionary AudioSinkDebugInfo {
+  long long startTime = 0;
+  long long lastGoodPosition = 0;
+  boolean isPlaying = false;
+  boolean isStarted = false;
+  boolean audioEnded = false;
+  long outputRate = 0;
+  long long written = 0;
+  boolean hasErrored = false;
+  boolean playbackComplete = false;
+};
+
+dictionary AudioSinkWrapperDebugInfo {
+  boolean isPlaying = false;
+  boolean isStarted = false;
+  boolean audioEnded = false;
+  required AudioSinkDebugInfo audioSink;
+};
+
+dictionary VideoSinkDebugInfo {
+  boolean isStarted = false;
+  boolean isPlaying = false;
+  boolean finished = false;
+  long size = 0;
+  long long videoFrameEndTime = 0;
+  boolean hasVideo = false;
+  boolean videoSinkEndRequestExists = false;
+  boolean endPromiseHolderIsEmpty = false;
+};
+
+dictionary DecodedStreamDataDebugInfo {
+  DOMString instance = "";
+  long long audioFramesWritten = 0;
+  long long streamAudioWritten = 0;
+  long long streamVideoWritten = 0;
+  long long nextAudioTime = 0;
+  long long lastVideoStartTime = 0;
+  long long lastVideoEndTime = 0;
+  boolean haveSentFinishAudio = false;
+  boolean haveSentFinishVideo = false;
+};
+
+dictionary DecodedStreamDebugInfo {
+  DOMString instance = "";
+  long long startTime = 0;
+  long long lastOutputTime = 0;
+  long playing = 0;
+  long long lastAudio = 0;
+  boolean audioQueueFinished = false;
+  long audioQueueSize = 0;
+  required DecodedStreamDataDebugInfo data;
+};
+
+dictionary MediaSinkDebugInfo {
+  required AudioSinkWrapperDebugInfo audioSinkWrapper;
+  required VideoSinkDebugInfo videoSink;
+  required DecodedStreamDebugInfo decodedStream;
+};
+
+dictionary MediaDecoderStateMachineDecodingStateDebugInfo {
+  boolean isPrerolling = false;
+};
+
+dictionary MediaDecoderStateMachineDebugInfo {
+  long long duration = 0;
+  long long mediaTime = 0;
+  long long clock = 0;
+  DOMString state = "";
+  long playState = 0;
+  boolean sentFirstFrameLoadedEvent = false;
+  boolean isPlaying = false;
+  DOMString audioRequestStatus = "";
+  DOMString videoRequestStatus = "";
+  long long decodedAudioEndTime = 0;
+  long long decodedVideoEndTime = 0;
+  boolean audioCompleted = false;
+  boolean videoCompleted = false;
+  required MediaDecoderStateMachineDecodingStateDebugInfo stateObj;
+  required MediaSinkDebugInfo mediaSink;
+};
+
+dictionary MediaStateDebugInfo {
+  boolean needInput = false;
+  boolean hasPromise = false;
+  boolean waitingPromise = false;
+  boolean hasDemuxRequest = false;
+  long demuxQueueSize = 0;
+  boolean hasDecoder = false;
+  double timeTreshold = 0.0;
+  boolean timeTresholdHasSeeked = false;
+  long long numSamplesInput = 0;
+  long long numSamplesOutput = 0;
+  long queueSize = 0;
+  long pending = 0;
+  long waitingForData = 0;
+  long demuxEOS = 0;
+  long drainState = 0;
+  long waitingForKey = 0;
+  long lastStreamSourceID = 0;
+};
+
+dictionary MediaFrameStats {
+  long long droppedDecodedFrames = 0;
+  long long droppedSinkFrames = 0;
+  long long droppedCompositorFrames = 0;
+};
+
+dictionary MediaFormatReaderDebugInfo {
+  DOMString videoType = "";
+  DOMString videoDecoderName = "";
+  long videoWidth = 0;
+  long videoHeight = 0;
+  double videoRate = 0.0;
+  DOMString audioType = "";
+  DOMString audioDecoderName = "";
+  boolean videoHardwareAccelerated = false;
+  long long videoNumSamplesOutputTotal = 0;
+  long long videoNumSamplesSkippedTotal = 0;
+  long audioChannels = 0;
+  double audioRate = 0.0;
+  long long audioFramesDecoded = 0;
+  required MediaStateDebugInfo audioState;
+  required MediaStateDebugInfo videoState;
+  required MediaFrameStats frameStats;
+};
+
+dictionary BufferRange {
+  double start = 0;
+  double end = 0;
+};
+
+dictionary TrackBuffersManagerDebugInfo {
+  DOMString type = "";
+  double nextSampleTime = 0.0;
+  long numSamples = 0;
+  long bufferSize = 0;
+  long evictable = 0;
+  long nextGetSampleIndex = 0;
+  long nextInsertionIndex = 0;
+  sequence<BufferRange> ranges = [];
+};
+
+dictionary MediaSourceDemuxerDebugInfo {
+  required TrackBuffersManagerDebugInfo audioTrack;
+  required TrackBuffersManagerDebugInfo videoTrack;
+};
+
+dictionary MediaSourceDecoderDebugInfo {
+  required MediaFormatReaderDebugInfo reader;
+  required MediaSourceDemuxerDebugInfo demuxer;
+};
+
+dictionary EMEDebugInfo {
+  DOMString keySystem = "";
+  DOMString sessionsInfo = "";
+};
+
+dictionary HTMLMediaElementDebugInfo {
+  long compositorDroppedFrames = 0;
+  required EMEDebugInfo EMEInfo;
+  required MediaDecoderDebugInfo decoder;
+};
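
The nesting documented at the top of this file means a privileged caller that resolves HTMLMediaElement.mozRequestDebugInfo() gets back a single HTMLMediaElementDebugInfo dictionary. As a rough sketch of how a test could digest it once the resolved value has been handed over to Python (the field names are taken from the dictionaries above; summarize_debug_info is a hypothetical helper, not part of this patch):

def summarize_debug_info(debug_info):
    """Pick a few fields of interest out of an HTMLMediaElementDebugInfo dict."""
    reader = debug_info["decoder"]["reader"]
    state_machine = debug_info["decoder"]["stateMachine"]
    return {
        "compositorDroppedFrames": debug_info["compositorDroppedFrames"],
        "videoDecoder": reader["videoDecoderName"],
        "hardwareAccelerated": reader["videoHardwareAccelerated"],
        "decodedFrames": reader["videoNumSamplesOutputTotal"],
        "skippedFrames": reader["videoNumSamplesSkippedTotal"],
        "stateMachineState": state_machine["state"],
    }
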
--- a/dom/webidl/MediaSource.webidl
+++ b/dom/webidl/MediaSource.webidl
@@ -37,11 +37,11 @@ interface MediaSource : EventTarget {
   void removeSourceBuffer(SourceBuffer sourceBuffer);
   [Throws]
   void endOfStream(optional MediaSourceEndOfStreamError error);
   [Throws]
   void setLiveSeekableRange(double start, double end);
   [Throws]
   void clearLiveSeekableRange();
   static boolean isTypeSupported(DOMString type);
-  [ChromeOnly]
-  readonly attribute DOMString mozDebugReaderData;
+  [Throws, ChromeOnly]
+  Promise<MediaSourceDecoderDebugInfo> mozDebugReaderData();
 };
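
mozDebugReaderData() now resolves with a MediaSourceDecoderDebugInfo, so the buffered ranges that used to be embedded in a formatted string arrive as lists of BufferRange entries. A small sketch of summing them per track, assuming the resolved dictionary has been serialized to Python with its WebIDL member names (buffered_seconds is a hypothetical helper):

def buffered_seconds(ms_debug_info):
    # Each track entry is a TrackBuffersManagerDebugInfo with a "ranges" list.
    demuxer = ms_debug_info["demuxer"]
    totals = {}
    for track in ("audioTrack", "videoTrack"):
        ranges = demuxer[track]["ranges"]
        totals[track] = sum(r["end"] - r["start"] for r in ranges)
    return totals
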
--- a/dom/webidl/moz.build
+++ b/dom/webidl/moz.build
@@ -635,16 +635,17 @@ WEBIDL_FILES = [
     'KeyframeEffect.webidl',
     'KeyIdsInitData.webidl',
     'L10nUtils.webidl',
     'LegacyQueryInterface.webidl',
     'LinkStyle.webidl',
     'LoadURIOptions.webidl',
     'Location.webidl',
     'MediaCapabilities.webidl',
+    'MediaDebugInfo.webidl',
     'MediaDeviceInfo.webidl',
     'MediaDevices.webidl',
     'MediaElementAudioSourceNode.webidl',
     'MediaEncryptedEvent.webidl',
     'MediaError.webidl',
     'MediaKeyError.webidl',
     'MediaKeyMessageEvent.webidl',
     'MediaKeys.webidl',