Merge inbound to mozilla-central. a=merge
author: Oana Pop Rus <opoprus@mozilla.com>
Mon, 01 Jul 2019 12:53:24 +0300
changeset 540415 b3eb1ee3b85dfb87789c92d417e20f377ebcfca3
parent 540414 501f50ccafdfd30a6bc8d85f2be8704ca335d65f (current diff)
parent 540413 98d86b9328ef5cf0454b98385f056020a4569f72 (diff)
child 540416 11168f905222e9ea49de07fb73f1901e66e183e7
child 540430 26bf5c05734f636d58fd350b8758a849d9d12868
push id: 11529
push user: archaeopteryx@coole-files.de
push date: Thu, 04 Jul 2019 15:22:33 +0000
treeherder: mozilla-beta@ebb510a784b8 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: merge
milestone: 69.0a1
first release with
nightly linux32
b3eb1ee3b85d / 69.0a1 / 20190701095406 / files
nightly linux64
b3eb1ee3b85d / 69.0a1 / 20190701095406 / files
nightly mac
b3eb1ee3b85d / 69.0a1 / 20190701095406 / files
nightly win32
b3eb1ee3b85d / 69.0a1 / 20190701095406 / files
nightly win64
b3eb1ee3b85d / 69.0a1 / 20190701095406 / files
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
releases
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge inbound to mozilla-central. a=merge
--- a/dom/base/nsScreen.h
+++ b/dom/base/nsScreen.h
@@ -115,17 +115,17 @@ class nsScreen : public mozilla::DOMEven
     return mozilla::dom::ScreenColorGamut::Srgb;
   }
 
   already_AddRefed<mozilla::dom::ScreenLuminance> GetLuminance() const {
     return nullptr;
   }
 
   static bool MediaCapabilitiesEnabled(JSContext* aCx, JSObject* aGlobal) {
-    return mozilla::StaticPrefs::MediaCapabilitiesScreenEnabled();
+    return mozilla::StaticPrefs::media_media_capabilities_screen_enabled();
   }
 
   IMPL_EVENT_HANDLER(change);
 
   // Deprecated
   void GetMozOrientation(nsString& aOrientation,
                          mozilla::dom::CallerType aCallerType) const;
 
--- a/dom/canvas/WebGL2ContextSync.cpp
+++ b/dom/canvas/WebGL2ContextSync.cpp
@@ -62,17 +62,17 @@ GLenum WebGL2Context::ClientWaitSync(con
 
   if (timeout > kMaxClientWaitSyncTimeoutNS) {
     ErrorInvalidOperation("`timeout` must not exceed %s nanoseconds.",
                           "MAX_CLIENT_WAIT_TIMEOUT_WEBGL");
     return LOCAL_GL_WAIT_FAILED;
   }
 
   const bool canBeAvailable =
-      (sync.mCanBeAvailable || StaticPrefs::WebGLImmediateQueries());
+      (sync.mCanBeAvailable || StaticPrefs::webgl_allow_immediate_queries());
   if (!canBeAvailable) {
     if (timeout) {
       GenerateWarning(
           "Sync object not yet queryable. Please wait for the event"
           " loop.");
     }
     return LOCAL_GL_WAIT_FAILED;
   }
@@ -113,17 +113,17 @@ void WebGL2Context::GetSyncParameter(JSC
   retval.setNull();
   if (IsContextLost()) return;
 
   if (!ValidateObject("sync", sync)) return;
 
   ////
 
   const bool canBeAvailable =
-      (sync.mCanBeAvailable || StaticPrefs::WebGLImmediateQueries());
+      (sync.mCanBeAvailable || StaticPrefs::webgl_allow_immediate_queries());
   if (!canBeAvailable && pname == LOCAL_GL_SYNC_STATUS) {
     retval.set(JS::Int32Value(LOCAL_GL_UNSIGNALED));
     return;
   }
 
   GLint result = 0;
   switch (pname) {
     case LOCAL_GL_OBJECT_TYPE:
--- a/dom/canvas/WebGLContext.cpp
+++ b/dom/canvas/WebGLContext.cpp
@@ -93,18 +93,18 @@ namespace mozilla {
 
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
 using namespace mozilla::gl;
 using namespace mozilla::layers;
 
 WebGLContextOptions::WebGLContextOptions() {
   // Set default alpha state based on preference.
-  alpha = !StaticPrefs::WebGLDefaultNoAlpha();
-  antialias = StaticPrefs::WebGLDefaultAntialias();
+  alpha = !StaticPrefs::webgl_default_no_alpha();
+  antialias = StaticPrefs::webgl_default_antialias();
 }
 
 bool WebGLContextOptions::operator==(const WebGLContextOptions& r) const {
   bool eq = true;
   eq &= (alpha == r.alpha);
   eq &= (depth == r.depth);
   eq &= (stencil == r.stencil);
   eq &= (premultipliedAlpha == r.premultipliedAlpha);
@@ -113,28 +113,28 @@ bool WebGLContextOptions::operator==(con
   eq &= (failIfMajorPerformanceCaveat == r.failIfMajorPerformanceCaveat);
   eq &= (powerPreference == r.powerPreference);
   return eq;
 }
 
 WebGLContext::WebGLContext()
     : gl(mGL_OnlyClearInDestroyResourcesAndContext)  // const reference
       ,
-      mMaxPerfWarnings(StaticPrefs::WebGLMaxPerfWarnings()),
+      mMaxPerfWarnings(StaticPrefs::webgl_perf_max_warnings()),
       mNumPerfWarnings(0),
       mMaxAcceptableFBStatusInvals(
-          StaticPrefs::WebGLMaxAcceptableFBStatusInvals()),
+          StaticPrefs::webgl_perf_max_acceptable_fb_status_invals()),
       mDataAllocGLCallCount(0),
       mEmptyTFO(0),
       mContextLossHandler(this),
       mNeedsFakeNoAlpha(false),
       mNeedsFakeNoDepth(false),
       mNeedsFakeNoStencil(false),
-      mAllowFBInvalidation(StaticPrefs::WebGLFBInvalidation()),
-      mMsaaSamples((uint8_t)StaticPrefs::WebGLMsaaSamples()) {
+      mAllowFBInvalidation(StaticPrefs::webgl_allow_fb_invalidation()),
+      mMsaaSamples((uint8_t)StaticPrefs::webgl_msaa_samples()) {
   mGeneration = 0;
   mInvalidated = false;
   mCapturedFrameInvalidated = false;
   mShouldPresent = true;
   mResetLayer = true;
   mOptionsFrozen = false;
   mDisableExtensions = false;
   mIsMesa = false;
@@ -160,17 +160,17 @@ WebGLContext::WebGLContext()
   mLastLossWasSimulated = false;
   mLoseContextOnMemoryPressure = false;
   mCanLoseContextInForeground = true;
 
   mAlreadyGeneratedWarnings = 0;
   mAlreadyWarnedAboutFakeVertexAttrib0 = false;
   mAlreadyWarnedAboutViewportLargerThanDest = false;
 
-  mMaxWarnings = StaticPrefs::WebGLMaxWarningsPerContext();
+  mMaxWarnings = StaticPrefs::webgl_max_warnings_per_context();
   if (mMaxWarnings < -1) {
     GenerateWarning(
         "webgl.max-warnings-per-context size is too large (seems like a "
         "negative value wrapped)");
     mMaxWarnings = 0;
   }
 
   mLastUseIndex = 0;
@@ -362,17 +362,17 @@ WebGLContext::SetContextOptions(JSContex
     newOpts.antialias = attributes.mAntialias.Value();
   }
 
   // Don't do antialiasing if we've disabled MSAA.
   if (!mMsaaSamples) {
     newOpts.antialias = false;
   }
 
-  if (newOpts.antialias && !StaticPrefs::WebGLForceMSAA()) {
+  if (newOpts.antialias && !StaticPrefs::webgl_msaa_force()) {
     const nsCOMPtr<nsIGfxInfo> gfxInfo = services::GetGfxInfo();
 
     nsCString blocklistId;
     if (IsFeatureInBlacklist(gfxInfo, nsIGfxInfo::FEATURE_WEBGL_MSAA,
                              &blocklistId)) {
       GenerateWarning(
           "Disallowing antialiased backbuffers due to blacklisting.");
       newOpts.antialias = false;
@@ -474,17 +474,17 @@ bool WebGLContext::CreateAndInitGL(
   bool tryANGLE = false;
 
   if (forceEnabled) {
     flags |= gl::CreateContextFlags::FORCE_ENABLE_HARDWARE;
   }
 
   if (IsWebGL2()) {
     flags |= gl::CreateContextFlags::PREFER_ES3;
-  } else if (!StaticPrefs::WebGL1AllowCoreProfile()) {
+  } else if (!StaticPrefs::webgl_1_allow_core_profiles()) {
     flags |= gl::CreateContextFlags::REQUIRE_COMPAT_PROFILE;
   }
 
   {
     bool highPower = false;
     switch (mOptions.powerPreference) {
       case dom::WebGLPowerPreference::Low_power:
         highPower = false;
@@ -499,17 +499,17 @@ bool WebGLContext::CreateAndInitGL(
         // ForceDiscreteGPUHelperCGL iff we decide it's a high-performance
         // application:
         // - Non-trivial canvas size
         // - Many draw calls
         // - Same origin with root page (try to stem bleeding from WebGL
         // ads/trackers)
       default:
         highPower = true;
-        if (StaticPrefs::WebGLDefaultLowPower()) {
+        if (StaticPrefs::webgl_default_low_power()) {
           highPower = false;
         } else if (mCanvasElement && !mCanvasElement->GetParentNode()) {
           GenerateWarning(
               "WebGLContextAttributes.powerPreference: 'default' when <canvas>"
               " has no parent Element defaults to 'low-power'.");
           highPower = false;
         }
         break;
@@ -554,22 +554,22 @@ bool WebGLContext::CreateAndInitGL(
   // --
 
   const bool useEGL = PR_GetEnv("MOZ_WEBGL_FORCE_EGL");
 
 #ifdef XP_WIN
   tryNativeGL = false;
   tryANGLE = true;
 
-  if (StaticPrefs::WebGLDisableWGL()) {
+  if (StaticPrefs::webgl_disable_wgl()) {
     tryNativeGL = false;
   }
 
-  if (StaticPrefs::WebGLDisableANGLE() || PR_GetEnv("MOZ_WEBGL_FORCE_OPENGL") ||
-      useEGL) {
+  if (StaticPrefs::webgl_disable_angle() ||
+      PR_GetEnv("MOZ_WEBGL_FORCE_OPENGL") || useEGL) {
     tryNativeGL = true;
     tryANGLE = false;
   }
 #endif
 
   if (tryNativeGL && !forceEnabled) {
     const nsCOMPtr<nsIGfxInfo> gfxInfo = services::GetGfxInfo();
     const auto feature = nsIGfxInfo::FEATURE_WEBGL_OPENGL;
@@ -814,50 +814,50 @@ WebGLContext::SetDimensions(int32_t sign
     return NS_ERROR_FAILURE;
   }
 
   // increment the generation number - Do this early because later
   // in CreateOffscreenGL(), "default" objects are created that will
   // pick up the old generation.
   ++mGeneration;
 
-  bool disabled = StaticPrefs::WebGLDisabled();
+  bool disabled = StaticPrefs::webgl_disabled();
 
   // TODO: When we have software webgl support we should use that instead.
   disabled |= gfxPlatform::InSafeMode();
 
   if (disabled) {
     if (gfxPlatform::InSafeMode()) {
       failureId = NS_LITERAL_CSTRING("FEATURE_FAILURE_WEBGL_SAFEMODE");
     } else {
       failureId = NS_LITERAL_CSTRING("FEATURE_FAILURE_WEBGL_DISABLED");
     }
     const nsLiteralCString text("WebGL is currently disabled.");
     ThrowEvent_WebGLContextCreationError(text);
     return NS_ERROR_FAILURE;
   }
 
-  if (StaticPrefs::WebGLDisableFailIfMajorPerformanceCaveat()) {
+  if (StaticPrefs::webgl_disable_fail_if_major_performance_caveat()) {
     mOptions.failIfMajorPerformanceCaveat = false;
   }
 
   if (mOptions.failIfMajorPerformanceCaveat) {
     nsCOMPtr<nsIGfxInfo> gfxInfo = services::GetGfxInfo();
     if (!HasAcceleratedLayers(gfxInfo)) {
       failureId = NS_LITERAL_CSTRING("FEATURE_FAILURE_WEBGL_PERF_CAVEAT");
       const nsLiteralCString text(
           "failIfMajorPerformanceCaveat: Compositor is not"
           " hardware-accelerated.");
       ThrowEvent_WebGLContextCreationError(text);
       return NS_ERROR_FAILURE;
     }
   }
 
   // Alright, now let's start trying.
-  bool forceEnabled = StaticPrefs::WebGLForceEnabled();
+  bool forceEnabled = StaticPrefs::webgl_force_enabled();
   ScopedGfxFeatureReporter reporter("WebGL", forceEnabled);
 
   MOZ_ASSERT(!gl);
   std::vector<FailureReason> failReasons;
   if (!CreateAndInitGL(forceEnabled, &failReasons)) {
     nsCString text("WebGL creation failed: ");
     for (const auto& cur : failReasons) {
       // Don't try to accumulate using an empty key if |cur.key| is empty.
@@ -979,19 +979,19 @@ WebGLContext::SetDimensions(int32_t sign
 
   failureId = NS_LITERAL_CSTRING("SUCCESS");
 
   gl->ResetSyncCallCount("WebGLContext Initialization");
   return NS_OK;
 }
 
 void WebGLContext::LoseOldestWebGLContextIfLimitExceeded() {
-  const auto maxWebGLContexts = StaticPrefs::WebGLMaxContexts();
+  const auto maxWebGLContexts = StaticPrefs::webgl_max_contexts();
   auto maxWebGLContextsPerPrincipal =
-      StaticPrefs::WebGLMaxContextsPerPrincipal();
+      StaticPrefs::webgl_max_contexts_per_principal();
 
   // maxWebGLContextsPerPrincipal must be less than maxWebGLContexts
   MOZ_ASSERT(maxWebGLContextsPerPrincipal <= maxWebGLContexts);
   maxWebGLContextsPerPrincipal =
       std::min(maxWebGLContextsPerPrincipal, maxWebGLContexts);
 
   if (!NS_IsMainThread()) {
     // XXX mtseng: bug 709490, WebGLMemoryTracker is not thread safe.
@@ -1343,17 +1343,17 @@ ScopedPrepForResourceClear::~ScopedPrepF
   gl->fClearStencil(webgl.mStencilClearValue);
 }
 
 }  // namespace webgl
 
 // -
 
 void WebGLContext::OnEndOfFrame() const {
-  if (StaticPrefs::WebGLSpewFrameAllocs()) {
+  if (StaticPrefs::webgl_perf_spew_frame_allocs()) {
     GeneratePerfWarning("[webgl.perf.spew-frame-allocs] %" PRIu64
                         " data allocations this frame.",
                         mDataAllocGLCallCount);
   }
   mDataAllocGLCallCount = 0;
   gl->ResetSyncCallCount("WebGLContext PresentScreenBuffer");
 }
 
--- a/dom/canvas/WebGLContextExtensions.cpp
+++ b/dom/canvas/WebGLContextExtensions.cpp
@@ -77,17 +77,17 @@ bool WebGLContext::IsExtensionSupported(
 
   // Chrome contexts need access to debug information even when
   // webgl.disable-extensions is set. This is used in the graphics
   // section of about:support
   if (callerType == dom::CallerType::System) {
     allowPrivilegedExts = true;
   }
 
-  if (StaticPrefs::WebGLPrivilegedExtensionsEnabled()) {
+  if (StaticPrefs::webgl_enable_privileged_extensions()) {
     allowPrivilegedExts = true;
   }
 
   if (allowPrivilegedExts) {
     switch (ext) {
       case WebGLExtensionID::EXT_disjoint_timer_query:
         return WebGLExtensionDisjointTimerQuery::IsSupported(this);
       case WebGLExtensionID::MOZ_debug:
--- a/dom/canvas/WebGLContextValidate.cpp
+++ b/dom/canvas/WebGLContextValidate.cpp
@@ -283,20 +283,21 @@ bool WebGLContext::InitAndValidateGL(Fai
     const nsPrintfCString reason(
         "GL error 0x%x occurred during OpenGL context"
         " initialization, before WebGL initialization!",
         error);
     *out_failReason = {"FEATURE_FAILURE_WEBGL_GLERR_1", reason};
     return false;
   }
 
-  mDisableExtensions = StaticPrefs::WebGLDisableExtensions();
+  mDisableExtensions = StaticPrefs::webgl_disable_extensions();
   mLoseContextOnMemoryPressure =
-      StaticPrefs::WebGLLoseContextOnMemoryPressure();
-  mCanLoseContextInForeground = StaticPrefs::WebGLCanLoseContextInForeground();
+      StaticPrefs::webgl_lose_context_on_memory_pressure();
+  mCanLoseContextInForeground =
+      StaticPrefs::webgl_can_lose_context_in_foreground();
 
   // These are the default values, see 6.2 State tables in the
   // OpenGL ES 2.0.25 spec.
   mColorWriteMask = 0x0f;
   mDriverColorMask = mColorWriteMask;
   mColorClearValue[0] = 0.f;
   mColorClearValue[1] = 0.f;
   mColorClearValue[2] = 0.f;
@@ -471,17 +472,17 @@ bool WebGLContext::InitAndValidateGL(Fai
 
   const GLenum driverPName = gl->IsCoreProfile()
                                  ? LOCAL_GL_POINT_SIZE_RANGE
                                  : LOCAL_GL_ALIASED_POINT_SIZE_RANGE;
   gl->fGetFloatv(driverPName, mGLAliasedPointSizeRange);
 
   ////////////////
 
-  if (StaticPrefs::WebGLMinCapabilityMode()) {
+  if (StaticPrefs::webgl_min_capability_mode()) {
     bool ok = true;
 
     ok &= RestrictCap(&mGLMaxVertexTextureImageUnits,
                       kMinMaxVertexTextureImageUnits);
     ok &= RestrictCap(&mGLMaxFragmentTextureImageUnits,
                       kMinMaxFragmentTextureImageUnits);
     ok &= RestrictCap(&mGLMaxCombinedTextureImageUnits,
                       kMinMaxCombinedTextureImageUnits);
@@ -648,25 +649,25 @@ bool WebGLContext::InitAndValidateGL(Fai
   static const float kDefaultGenericVertexAttribData[4] = {0, 0, 0, 1};
   memcpy(mGenericVertexAttrib0Data, kDefaultGenericVertexAttribData,
          sizeof(mGenericVertexAttrib0Data));
 
   mFakeVertexAttrib0BufferObject = 0;
 
   mNeedsIndexValidation =
       !gl->IsSupported(gl::GLFeature::robust_buffer_access_behavior);
-  switch (StaticPrefs::WebGLForceIndexValidation()) {
+  switch (StaticPrefs::webgl_force_index_validation()) {
     case -1:
       mNeedsIndexValidation = false;
       break;
     case 1:
       mNeedsIndexValidation = true;
       break;
     default:
-      MOZ_ASSERT(StaticPrefs::WebGLForceIndexValidation() == 0);
+      MOZ_ASSERT(StaticPrefs::webgl_force_index_validation() == 0);
       break;
   }
 
   return true;
 }
 
 bool WebGLContext::ValidateFramebufferTarget(GLenum target) const {
   bool isValid = true;
--- a/dom/canvas/WebGLExtensions.cpp
+++ b/dom/canvas/WebGLExtensions.cpp
@@ -31,17 +31,17 @@ NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(W
 // -
 
 WebGLExtensionExplicitPresent::WebGLExtensionExplicitPresent(WebGLContext* const webgl)
     : WebGLExtensionBase(webgl) {
   MOZ_ASSERT(IsSupported(webgl), "Don't construct extension if unsupported.");
 }
 
 bool WebGLExtensionExplicitPresent::IsSupported(const WebGLContext* const webgl) {
-  return StaticPrefs::WebGLDraftExtensionsEnabled();
+  return StaticPrefs::webgl_enable_draft_extensions();
 }
 
 void WebGLExtensionExplicitPresent::Present() const {
   if (mIsLost || !mContext) return;
   mContext->PresentScreenBuffer();
 }
 
 IMPL_WEBGL_EXTENSION_GOOP(WebGLExtensionExplicitPresent, WEBGL_explicit_present)
@@ -76,17 +76,19 @@ WebGLExtensionFBORenderMipmap::WebGLExte
   MOZ_ASSERT(IsSupported(webgl), "Don't construct extension if unsupported.");
 }
 
 WebGLExtensionFBORenderMipmap::~WebGLExtensionFBORenderMipmap() = default;
 
 bool WebGLExtensionFBORenderMipmap::IsSupported(
     const WebGLContext* const webgl) {
   if (webgl->IsWebGL2()) return false;
-  if (!StaticPrefs::WebGLDraftExtensionsEnabled()) return false;
+  if (!StaticPrefs::webgl_enable_draft_extensions()) {
+    return false;
+  }
 
   const auto& gl = webgl->gl;
   if (!gl->IsGLES()) return true;
   if (gl->Version() >= 300) return true;
   return gl->IsExtensionSupported(gl::GLContext::OES_fbo_render_mipmap);
 }
 
 IMPL_WEBGL_EXTENSION_GOOP(WebGLExtensionFBORenderMipmap, OES_fbo_render_mipmap)
@@ -97,17 +99,19 @@ WebGLExtensionMultiview::WebGLExtensionM
     : WebGLExtensionBase(webgl) {
   MOZ_ASSERT(IsSupported(webgl), "Don't construct extension if unsupported.");
 }
 
 WebGLExtensionMultiview::~WebGLExtensionMultiview() = default;
 
 bool WebGLExtensionMultiview::IsSupported(const WebGLContext* const webgl) {
   if (!webgl->IsWebGL2()) return false;
-  if (!StaticPrefs::WebGLDraftExtensionsEnabled()) return false;
+  if (!StaticPrefs::webgl_enable_draft_extensions()) {
+    return false;
+  }
 
   const auto& gl = webgl->gl;
   return gl->IsSupported(gl::GLFeature::multiview);
 }
 
 void WebGLExtensionMultiview::FramebufferTextureMultiviewOVR(
     const GLenum target, const GLenum attachment, WebGLTexture* const texture,
     const GLint level, const GLint baseViewIndex,
--- a/dom/canvas/WebGLQuery.cpp
+++ b/dom/canvas/WebGLQuery.cpp
@@ -111,17 +111,17 @@ void WebGLQuery::GetQueryParameter(GLenu
   if (mActiveSlot)
     return mContext->ErrorInvalidOperation("Query is still active.");
 
   // End of validation
   ////
 
   // We must usually wait for an event loop before the query can be available.
   const bool canBeAvailable =
-      (mCanBeAvailable || StaticPrefs::WebGLImmediateQueries());
+      (mCanBeAvailable || StaticPrefs::webgl_allow_immediate_queries());
   if (!canBeAvailable) {
     if (pname == LOCAL_GL_QUERY_RESULT_AVAILABLE) {
       retval.set(JS::BooleanValue(false));
     }
     return;
   }
 
   const auto& gl = mContext->gl;
--- a/dom/canvas/WebGLShaderValidator.cpp
+++ b/dom/canvas/WebGLShaderValidator.cpp
@@ -62,17 +62,17 @@ static ShCompileOptions ChooseValidatorC
 
     if (!gl->IsANGLE() && gl->Vendor() == gl::GLVendor::Intel) {
       // Failures on at least Windows+Intel+OGL on:
       // conformance/glsl/constructors/glsl-construct-mat2.html
       options |= SH_SCALARIZE_VEC_AND_MAT_CONSTRUCTOR_ARGS;
     }
   }
 
-  if (StaticPrefs::WebGLAllANGLEOptions()) {
+  if (StaticPrefs::webgl_all_angle_options()) {
     options = -1;
 
     options ^= SH_INTERMEDIATE_TREE;
     options ^= SH_LINE_DIRECTIVES;
     options ^= SH_SOURCE_PATH;
 
     options ^= SH_LIMIT_EXPRESSION_COMPLEXITY;
     options ^= SH_LIMIT_CALL_STACK_DEPTH;
--- a/dom/canvas/WebGLTextureUpload.cpp
+++ b/dom/canvas/WebGLTextureUpload.cpp
@@ -323,17 +323,17 @@ UniquePtr<webgl::TexUnpackBlob> WebGLCon
   auto sfer = nsLayoutUtils::SurfaceFromElement(
       const_cast<dom::Element*>(&elem), flags, idealDrawTarget);
 
   //////
 
   uint32_t elemWidth = 0;
   uint32_t elemHeight = 0;
   layers::Image* layersImage = nullptr;
-  if (!StaticPrefs::WebGLDisableDOMBlitUploads() && sfer.mLayersImage) {
+  if (!StaticPrefs::webgl_disable_DOM_blit_uploads() && sfer.mLayersImage) {
     layersImage = sfer.mLayersImage;
     elemWidth = layersImage->GetSize().width;
     elemHeight = layersImage->GetSize().height;
   }
 
   RefPtr<gfx::DataSourceSurface> dataSurf;
   if (!layersImage && sfer.GetSourceSurface()) {
     const auto surf = sfer.GetSourceSurface();
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -3708,17 +3708,17 @@ already_AddRefed<Promise> HTMLMediaEleme
   } else {
     AUTOPLAY_LOG("reject MediaElement %p to play", this);
     AsyncRejectPendingPlayPromises(NS_ERROR_DOM_MEDIA_NOT_ALLOWED_ERR);
   }
   return promise.forget();
 }
 
 void HTMLMediaElement::DispatchEventsWhenPlayWasNotAllowed() {
-  if (StaticPrefs::MediaBlockEventEnabled()) {
+  if (StaticPrefs::media_autoplay_block_event_enabled()) {
     DispatchAsyncEvent(NS_LITERAL_STRING("blocked"));
   }
 #if defined(MOZ_WIDGET_ANDROID)
   RefPtr<AsyncEventDispatcher> asyncDispatcher = new AsyncEventDispatcher(
       this, NS_LITERAL_STRING("MozAutoplayMediaBlocked"), CanBubble::eYes,
       ChromeOnlyDispatch::eYes);
   asyncDispatcher->PostDOMEvent();
 #endif
@@ -4102,17 +4102,17 @@ void HTMLMediaElement::HiddenVideoStart(
   MOZ_ASSERT(NS_IsMainThread());
   mHiddenPlayTime.Start();
   if (mVideoDecodeSuspendTimer) {
     // Already started, just keep it running.
     return;
   }
   NS_NewTimerWithFuncCallback(
       getter_AddRefs(mVideoDecodeSuspendTimer), VideoDecodeSuspendTimerCallback,
-      this, StaticPrefs::MediaSuspendBkgndVideoDelayMs(),
+      this, StaticPrefs::media_suspend_bkgnd_video_delay_ms(),
       nsITimer::TYPE_ONE_SHOT,
       "HTMLMediaElement::VideoDecodeSuspendTimerCallback",
       mMainThreadEventTarget);
 }
 
 void HTMLMediaElement::HiddenVideoStop() {
   MOZ_ASSERT(NS_IsMainThread());
   mHiddenPlayTime.Pause();
@@ -4270,17 +4270,18 @@ void HTMLMediaElement::ReportTelemetry()
                               NS_LITERAL_CSTRING("All"), max_ms);
         LOG(LogLevel::Debug,
             ("%p VIDEO_INTER_KEYFRAME_MAX_MS = %u, keys: '%s' and 'All'", this,
              max_ms, key.get()));
       } else {
         // Here, we have played *some* of the video, but didn't get more than 1
         // keyframe. Report '0' if we have played for longer than the video-
         // decode-suspend delay (showing recovery would be difficult).
-        uint32_t suspendDelay_ms = StaticPrefs::MediaSuspendBkgndVideoDelayMs();
+        uint32_t suspendDelay_ms =
+            StaticPrefs::media_suspend_bkgnd_video_delay_ms();
         if (uint32_t(playTime * 1000.0) > suspendDelay_ms) {
           Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS, key, 0);
           Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS,
                                 NS_LITERAL_CSTRING("All"), 0);
           LOG(LogLevel::Debug,
               ("%p VIDEO_INTER_KEYFRAME_MAX_MS = 0 (only 1 keyframe), keys: "
                "'%s' and 'All'",
                this, key.get()));
--- a/dom/media/AutoplayPolicy.cpp
+++ b/dom/media/AutoplayPolicy.cpp
@@ -149,36 +149,36 @@ static bool IsAudioContextAllowedToPlay(
   // Offline context won't directly output sound to audio devices.
   return aContext.IsOffline() ||
          IsWindowAllowedToPlay(aContext.GetParentObject());
 }
 
 static bool IsEnableBlockingWebAudioByUserGesturePolicy() {
   return DefaultAutoplayBehaviour() != nsIAutoplay::ALLOWED &&
          Preferences::GetBool("media.autoplay.block-webaudio", false) &&
-         StaticPrefs::MediaAutoplayUserGesturesNeeded();
+         StaticPrefs::media_autoplay_enabled_user_gestures_needed();
 }
 
 /* static */
 bool AutoplayPolicy::WouldBeAllowedToPlayIfAutoplayDisabled(
     const HTMLMediaElement& aElement) {
   return IsMediaElementInaudible(aElement) ||
          IsWindowAllowedToPlay(aElement.OwnerDoc()->GetInnerWindow());
 }
 
 /* static */
 bool AutoplayPolicy::WouldBeAllowedToPlayIfAutoplayDisabled(
     const AudioContext& aContext) {
   return IsAudioContextAllowedToPlay(aContext);
 }
 
 static bool IsAllowedToPlayByBlockingModel(const HTMLMediaElement& aElement) {
-  if (!StaticPrefs::MediaAutoplayUserGesturesNeeded()) {
-  // If element is blessed, it would always be allowed to play().
-  return aElement.IsBlessed() || EventStateManager::IsHandlingUserInput();
+  if (!StaticPrefs::media_autoplay_enabled_user_gestures_needed()) {
+    // If element is blessed, it would always be allowed to play().
+    return aElement.IsBlessed() || EventStateManager::IsHandlingUserInput();
   }
   return IsWindowAllowedToPlay(aElement.OwnerDoc()->GetInnerWindow());
 }
 
 static bool IsAllowedToPlayInternal(const HTMLMediaElement& aElement) {
   Document* approver = ApproverDocOf(*aElement.OwnerDoc());
 
   bool isInaudible = IsMediaElementInaudible(aElement);
--- a/dom/media/BackgroundVideoDecodingPermissionObserver.cpp
+++ b/dom/media/BackgroundVideoDecodingPermissionObserver.cpp
@@ -18,17 +18,17 @@ BackgroundVideoDecodingPermissionObserve
     : mDecoder(aDecoder), mIsRegisteredForEvent(false) {
   MOZ_ASSERT(mDecoder);
 }
 
 NS_IMETHODIMP
 BackgroundVideoDecodingPermissionObserver::Observe(nsISupports* aSubject,
                                                    const char* aTopic,
                                                    const char16_t* aData) {
-  if (!StaticPrefs::MediaResumeBkgndVideoOnTabhover()) {
+  if (!StaticPrefs::media_resume_bkgnd_video_on_tabhover()) {
     return NS_OK;
   }
 
   if (!IsValidEventSender(aSubject)) {
     return NS_OK;
   }
 
   if (strcmp(aTopic, "unselected-tab-hover") == 0) {
--- a/dom/media/Benchmark.cpp
+++ b/dom/media/Benchmark.cpp
@@ -61,47 +61,48 @@ bool VP9Benchmark::ShouldRun() {
 #endif
 }
 
 // static
 uint32_t VP9Benchmark::MediaBenchmarkVp9Fps() {
   if (!ShouldRun()) {
     return 0;
   }
-  return StaticPrefs::MediaBenchmarkVp9Fps();
+  return StaticPrefs::media_benchmark_vp9_fps();
 }
 
 // static
 bool VP9Benchmark::IsVP9DecodeFast(bool aDefault) {
 #if defined(MOZ_WIDGET_ANDROID)
   return false;
 #else
   if (!ShouldRun()) {
     return false;
   }
   static StaticMutex sMutex;
-  uint32_t decodeFps = StaticPrefs::MediaBenchmarkVp9Fps();
-  uint32_t hadRecentUpdate = StaticPrefs::MediaBenchmarkVp9Versioncheck();
+  uint32_t decodeFps = StaticPrefs::media_benchmark_vp9_fps();
+  uint32_t hadRecentUpdate = StaticPrefs::media_benchmark_vp9_versioncheck();
   bool needBenchmark;
   {
     StaticMutexAutoLock lock(sMutex);
     needBenchmark = !sHasRunTest &&
                     (decodeFps == 0 || hadRecentUpdate != sBenchmarkVersionID);
     sHasRunTest = true;
   }
 
   if (needBenchmark) {
     RefPtr<WebMDemuxer> demuxer = new WebMDemuxer(
         new BufferMediaResource(sWebMSample, sizeof(sWebMSample)));
     RefPtr<Benchmark> estimiser = new Benchmark(
         demuxer,
-        {StaticPrefs::MediaBenchmarkFrames(),  // frames to measure
+        {StaticPrefs::media_benchmark_frames(),  // frames to measure
          1,  // start benchmarking after decoding this frame.
          8,  // loop after decoding that many frames.
-         TimeDuration::FromMilliseconds(StaticPrefs::MediaBenchmarkTimeout())});
+         TimeDuration::FromMilliseconds(
+             StaticPrefs::media_benchmark_timeout())});
     estimiser->Run()->Then(
         AbstractThread::MainThread(), __func__,
         [](uint32_t aDecodeFps) {
           if (XRE_IsContentProcess()) {
             dom::ContentChild* contentChild = dom::ContentChild::GetSingleton();
             if (contentChild) {
               contentChild->SendNotifyBenchmarkResult(NS_LITERAL_STRING("VP9"),
                                                       aDecodeFps);
@@ -116,17 +117,17 @@ bool VP9Benchmark::IsVP9DecodeFast(bool 
         },
         []() {});
   }
 
   if (decodeFps == 0) {
     return aDefault;
   }
 
-  return decodeFps >= StaticPrefs::MediaBenchmarkVp9Threshold();
+  return decodeFps >= StaticPrefs::media_benchmark_vp9_threshold();
 #endif
 }
 
 Benchmark::Benchmark(MediaDataDemuxer* aDemuxer, const Parameters& aParameters)
     : QueueObject(new TaskQueue(GetMediaThreadPool(MediaThreadType::PLAYBACK),
                                 "Benchmark::QueueObject")),
       mParameters(aParameters),
       mKeepAliveUntilComplete(this),
--- a/dom/media/ChannelMediaDecoder.cpp
+++ b/dom/media/ChannelMediaDecoder.cpp
@@ -463,17 +463,17 @@ bool ChannelMediaDecoder::ShouldThrottle
   // (so that we always throttle at the readahead limit on mobile if using
   // a cellular network) or if the download is fast enough that there's no
   // concern about playback being interrupted.
   MOZ_ASSERT(NS_IsMainThread());
   NS_ENSURE_TRUE(GetStateMachine(), false);
 
   int64_t length = aStats.mTotalBytes;
   if (length > 0 &&
-      length <= int64_t(StaticPrefs::MediaMemoryCacheMaxSize()) * 1024) {
+      length <= int64_t(StaticPrefs::media_memory_cache_max_size()) * 1024) {
     // Don't throttle the download of small resources. This is to speed
     // up seeking, as seeks into unbuffered ranges would require starting
     // up a new HTTP transaction, which adds latency.
     return false;
   }
 
   if (OnCellularConnection() &&
       Preferences::GetBool(
--- a/dom/media/MediaCache.cpp
+++ b/dom/media/MediaCache.cpp
@@ -321,29 +321,29 @@ class MediaCache {
     Truncate();
     NS_ASSERTION(mIndex.Length() == 0, "Blocks leaked?");
 
     MOZ_COUNT_DTOR(MediaCache);
   }
 
   static size_t CacheSize() {
     MOZ_ASSERT(sThread->IsOnCurrentThread());
-    return sOnCellular ? StaticPrefs::MediaCacheCellularSize()
-                       : StaticPrefs::MediaCacheSize();
+    return sOnCellular ? StaticPrefs::media_cache_size_cellular()
+                       : StaticPrefs::media_cache_size();
   }
 
   static size_t ReadaheadLimit() {
     MOZ_ASSERT(sThread->IsOnCurrentThread());
-    return sOnCellular ? StaticPrefs::MediaCacheCellularReadaheadLimit()
-                       : StaticPrefs::MediaCacheReadaheadLimit();
+    return sOnCellular ? StaticPrefs::media_cache_readahead_limit_cellular()
+                       : StaticPrefs::media_cache_readahead_limit();
   }
 
   static size_t ResumeThreshold() {
-    return sOnCellular ? StaticPrefs::MediaCacheCellularResumeThreshold()
-                       : StaticPrefs::MediaCacheResumeThreshold();
+    return sOnCellular ? StaticPrefs::media_cache_resume_threshold_cellular()
+                       : StaticPrefs::media_cache_resume_threshold();
   }
 
   // Find a free or reusable block and return its index. If there are no
   // free blocks and no reusable blocks, add a new block to the cache
   // and return it. Can return -1 on OOM.
   int32_t FindBlockForIncomingData(AutoLock&, TimeStamp aNow,
                                    MediaCacheStream* aStream,
                                    int32_t aStreamBlockIndex);
@@ -784,17 +784,17 @@ RefPtr<MediaCache> MediaCache::GetMediaC
   }
 
   if (!sThread) {
     return nullptr;
   }
 
   if (aContentLength > 0 &&
       aContentLength <=
-          int64_t(StaticPrefs::MediaMemoryCacheMaxSize()) * 1024) {
+          int64_t(StaticPrefs::media_memory_cache_max_size()) * 1024) {
     // Small-enough resource, use a new memory-backed MediaCache.
     RefPtr<MediaBlockCacheBase> bc = new MemoryBlockCache(aContentLength);
     nsresult rv = bc->Init();
     if (NS_SUCCEEDED(rv)) {
       RefPtr<MediaCache> mc = new MediaCache(bc);
       LOG("GetMediaCache(%" PRIi64 ") -> Memory MediaCache %p", aContentLength,
           mc.get());
       return mc;
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -1242,23 +1242,23 @@ bool MediaDecoder::CanPlayThrough() {
 
 RefPtr<SetCDMPromise> MediaDecoder::SetCDMProxy(CDMProxy* aProxy) {
   MOZ_ASSERT(NS_IsMainThread());
   return InvokeAsync<RefPtr<CDMProxy>>(mReader->OwnerThread(), mReader.get(),
                                        __func__,
                                        &MediaFormatReader::SetCDMProxy, aProxy);
 }
 
-bool MediaDecoder::IsOpusEnabled() { return StaticPrefs::MediaOpusEnabled(); }
+bool MediaDecoder::IsOpusEnabled() { return StaticPrefs::media_opus_enabled(); }
 
-bool MediaDecoder::IsOggEnabled() { return StaticPrefs::MediaOggEnabled(); }
+bool MediaDecoder::IsOggEnabled() { return StaticPrefs::media_ogg_enabled(); }
 
-bool MediaDecoder::IsWaveEnabled() { return StaticPrefs::MediaWaveEnabled(); }
+bool MediaDecoder::IsWaveEnabled() { return StaticPrefs::media_wave_enabled(); }
 
-bool MediaDecoder::IsWebMEnabled() { return StaticPrefs::MediaWebMEnabled(); }
+bool MediaDecoder::IsWebMEnabled() { return StaticPrefs::media_webm_enabled(); }
 
 NS_IMETHODIMP
 MediaMemoryTracker::CollectReports(nsIHandleReportCallback* aHandleReport,
                                    nsISupports* aData, bool aAnonymize) {
   // NB: When resourceSizes' ref count goes to 0 the promise will report the
   //     resources memory and finish the asynchronous memory report.
   RefPtr<MediaDecoder::ResourceSizes> resourceSizes =
       new MediaDecoder::ResourceSizes(MediaMemoryTracker::MallocSizeOf);
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -168,17 +168,17 @@ static void DiscardFramesFromTail(MediaQ
     break;
   }
 }
 
 // Delay, in milliseconds, that tabs needs to be in background before video
 // decoding is suspended.
 static TimeDuration SuspendBackgroundVideoDelay() {
   return TimeDuration::FromMilliseconds(
-      StaticPrefs::MediaSuspendBkgndVideoDelayMs());
+      StaticPrefs::media_suspend_bkgnd_video_delay_ms());
 }
 
 class MediaDecoderStateMachine::StateObject {
  public:
   virtual ~StateObject() {}
   virtual void Exit() {}  // Exit action.
   virtual void Step() {}  // Perform a 'cycle' of this state object.
   virtual State GetState() const = 0;
@@ -690,17 +690,17 @@ class MediaDecoderStateMachine::Decoding
 
   void StartDormantTimer() {
     if (!mMaster->mMediaSeekable) {
       // Don't enter dormant if the media is not seekable because we need to
       // seek when exiting dormant.
       return;
     }
 
-    auto timeout = StaticPrefs::MediaDormantOnPauseTimeoutMs();
+    auto timeout = StaticPrefs::media_dormant_on_pause_timeout_ms();
     if (timeout < 0) {
       // Disabled when timeout is negative.
       return;
     } else if (timeout == 0) {
       // Enter dormant immediately without scheduling a timer.
       SetState<DormantState>();
       return;
     }
@@ -2204,17 +2204,17 @@ void MediaDecoderStateMachine::DecodeMet
   MOZ_ASSERT(mMaster->mDuration.Ref().isSome());
 
   mMaster->mMetadataLoadedEvent.Notify(std::move(aMetadata.mInfo),
                                        std::move(aMetadata.mTags),
                                        MediaDecoderEventVisibility::Observable);
 
   // Check whether the media satisfies the requirement of seamless looing.
   // (Before checking the media is audio only, we need to get metadata first.)
-  mMaster->mSeamlessLoopingAllowed = StaticPrefs::MediaSeamlessLooping() &&
+  mMaster->mSeamlessLoopingAllowed = StaticPrefs::media_seamless_looping() &&
                                      mMaster->HasAudio() &&
                                      !mMaster->HasVideo();
 
   SetState<DecodingFirstFrameState>();
 }
 
 void MediaDecoderStateMachine::DormantState::HandlePlayStateChanged(
     MediaDecoder::PlayState aPlayState) {
@@ -2951,17 +2951,17 @@ void MediaDecoderStateMachine::SetVideoD
 
   LOG("SetVideoDecodeModeInternal(), VideoDecodeMode=(%s->%s), "
       "mVideoDecodeSuspended=%c",
       mVideoDecodeMode == VideoDecodeMode::Normal ? "Normal" : "Suspend",
       aMode == VideoDecodeMode::Normal ? "Normal" : "Suspend",
       mVideoDecodeSuspended ? 'T' : 'F');
 
   // Should not suspend decoding if we don't turn on the pref.
-  if (!StaticPrefs::MediaSuspendBkgndVideoEnabled() &&
+  if (!StaticPrefs::media_suspend_bkgnd_video_enabled() &&
       aMode == VideoDecodeMode::Suspend) {
     LOG("SetVideoDecodeModeInternal(), early return because preference off and "
         "set to Suspend");
     return;
   }
 
   if (aMode == mVideoDecodeMode) {
     LOG("SetVideoDecodeModeInternal(), early return because the mode does not "
--- a/dom/media/MediaFormatReader.cpp
+++ b/dom/media/MediaFormatReader.cpp
@@ -865,19 +865,19 @@ MediaFormatReader::DemuxerProxy::NotifyD
 }
 
 MediaFormatReader::MediaFormatReader(MediaFormatReaderInit& aInit,
                                      MediaDataDemuxer* aDemuxer)
     : mTaskQueue(new TaskQueue(GetMediaThreadPool(MediaThreadType::PLAYBACK),
                                "MediaFormatReader::mTaskQueue",
                                /* aSupportsTailDispatch = */ true)),
       mAudio(this, MediaData::Type::AUDIO_DATA,
-             StaticPrefs::MediaAudioMaxDecodeError()),
+             StaticPrefs::media_audio_max_decode_error()),
       mVideo(this, MediaData::Type::VIDEO_DATA,
-             StaticPrefs::MediaVideoMaxDecodeError()),
+             StaticPrefs::media_video_max_decode_error()),
       mDemuxer(new DemuxerProxy(aDemuxer)),
       mDemuxerInitDone(false),
       mPendingNotifyDataArrived(false),
       mLastReportedNumDecodedFrames(0),
       mPreviousDecodedKeyframeTime_us(sNoPreviousDecodedKeyframe),
       mKnowsCompositor(aInit.mKnowsCompositor),
       mInitDone(false),
       mTrackDemuxersMayBlock(false),
@@ -1128,17 +1128,17 @@ MediaFormatReader::AsyncReadMetadata() {
       ->Track(mDemuxerInitRequest);
   return p;
 }
 
 void MediaFormatReader::OnDemuxerInitDone(const MediaResult& aResult) {
   MOZ_ASSERT(OnTaskQueue());
   mDemuxerInitRequest.Complete();
 
-  if (NS_FAILED(aResult) && StaticPrefs::MediaPlaybackWarningsAsErrors()) {
+  if (NS_FAILED(aResult) && StaticPrefs::media_playback_warnings_as_errors()) {
     mMetadataPromise.Reject(aResult, __func__);
     return;
   }
 
   mDemuxerInitDone = true;
 
   UniquePtr<MetadataTags> tags(MakeUnique<MetadataTags>());
 
@@ -1327,17 +1327,17 @@ MediaFormatReader::DecoderData& MediaFor
     return mAudio;
   }
   return mVideo;
 }
 
 bool MediaFormatReader::ShouldSkip(TimeUnit aTimeThreshold) {
   MOZ_ASSERT(HasVideo());
 
-  if (!StaticPrefs::MediaDecoderSkipToNextKeyFrameEnabled()) {
+  if (!StaticPrefs::media_decoder_skip_to_next_key_frame_enabled()) {
     return false;
   }
 
   TimeUnit nextKeyframe;
   nsresult rv = mVideo.mTrackDemuxer->GetNextRandomAccessPoint(&nextKeyframe);
   if (NS_FAILED(rv)) {
     // Only OggTrackDemuxer with video type gets into here.
     // We don't support skip-to-next-frame for this case.
@@ -1894,17 +1894,17 @@ void MediaFormatReader::HandleDemuxedSam
 
   RefPtr<MediaRawData> sample = decoder.mQueuedSamples[0];
   const RefPtr<TrackInfoSharedPtr> info = sample->mTrackInfo;
 
   if (info && decoder.mLastStreamSourceID != info->GetID()) {
     nsTArray<RefPtr<MediaRawData>> samples;
     if (decoder.mDecoder) {
       bool recyclable =
-          StaticPrefs::MediaDecoderRecycleEnabled() &&
+          StaticPrefs::media_decoder_recycle_enabled() &&
           decoder.mDecoder->SupportDecoderRecycling() &&
           (*info)->mCrypto.mCryptoScheme ==
               decoder.GetCurrentInfo()->mCrypto.mCryptoScheme &&
           (*info)->mMimeType == decoder.GetCurrentInfo()->mMimeType;
       if (!recyclable && decoder.mTimeThreshold.isNothing() &&
           (decoder.mNextStreamSourceID.isNothing() ||
            decoder.mNextStreamSourceID.ref() != info->GetID())) {
         LOG("%s stream id has changed from:%d to:%d, draining decoder.",
--- a/dom/media/MediaFormatReader.h
+++ b/dom/media/MediaFormatReader.h
@@ -440,17 +440,17 @@ class MediaFormatReader final
     bool HasFatalError() const {
       if (!mError.isSome()) {
         return false;
       }
       if (mError.ref() == NS_ERROR_DOM_MEDIA_DECODE_ERR) {
         // Allow decode errors to be non-fatal, but give up
         // if we have too many, or if warnings should be treated as errors.
         return mNumOfConsecutiveError > mMaxConsecutiveError ||
-               StaticPrefs::MediaPlaybackWarningsAsErrors();
+               StaticPrefs::media_playback_warnings_as_errors();
       } else if (mError.ref() == NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER) {
         // If the caller asked for a new decoder we shouldn't treat
         // it as fatal.
         return false;
       } else {
         // All other error types are fatal
         return true;
       }
--- a/dom/media/MemoryBlockCache.cpp
+++ b/dom/media/MemoryBlockCache.cpp
@@ -127,17 +127,17 @@ enum MemoryBlockCacheTelemetryErrors {
   WriteBlockOverflow = 3,
   WriteBlockCannotGrow = 4,
   MoveBlockSourceOverrun = 5,
   MoveBlockDestOverflow = 6,
   MoveBlockCannotGrow = 7,
 };
 
 static int32_t CalculateMaxBlocks(int64_t aContentLength) {
-  int64_t maxSize = int64_t(StaticPrefs::MediaMemoryCacheMaxSize()) * 1024;
+  int64_t maxSize = int64_t(StaticPrefs::media_memory_cache_max_size()) * 1024;
   MOZ_ASSERT(aContentLength <= maxSize);
   MOZ_ASSERT(maxSize % MediaBlockCacheBase::BLOCK_SIZE == 0);
   // Note: It doesn't matter if calculations overflow, Init() would later fail.
   // We want at least enough blocks to contain the original content length.
   const int32_t requiredBlocks = maxSize / MediaBlockCacheBase::BLOCK_SIZE;
   // Allow at least 1s of ultra HD (25Mbps).
   const int32_t workableBlocks =
       25 * 1024 * 1024 / 8 / MediaBlockCacheBase::BLOCK_SIZE;
@@ -188,18 +188,19 @@ bool MemoryBlockCache::EnsureBufferCanCo
     // limit.
     // The alternative would have been to reserve the space first with
     // `atomic += extra` and then undo it with `atomic -= extra` in case of
     // failure; but this would have meant potentially preventing other (small
     // but successful) allocations.
     static const size_t sysmem =
         std::max<size_t>(PR_GetPhysicalMemorySize(), 32 * 1024 * 1024);
     const size_t limit = std::min(
-        size_t(StaticPrefs::MediaMemoryCachesCombinedLimitKb()) * 1024,
-        sysmem * StaticPrefs::MediaMemoryCachesCombinedLimitPcSysmem() / 100);
+        size_t(StaticPrefs::media_memory_caches_combined_limit_kb()) * 1024,
+        sysmem * StaticPrefs::media_memory_caches_combined_limit_pc_sysmem() /
+            100);
     const size_t currentSizes = static_cast<size_t>(gCombinedSizes);
     if (currentSizes + extra > limit) {
       LOG("EnsureBufferCanContain(%zu) - buffer size %zu, wanted + %zu = %zu;"
           " combined sizes %zu + %zu > limit %zu",
           aContentLength, initialLength, extra, desiredLength, currentSizes,
           extra, limit);
       return false;
     }
--- a/dom/media/VideoUtils.cpp
+++ b/dom/media/VideoUtils.cpp
@@ -157,17 +157,17 @@ void DownmixStereoToMono(mozilla::AudioD
   }
 }
 
 uint32_t DecideAudioPlaybackChannels(const AudioInfo& info) {
   if (StaticPrefs::accessibility_monoaudio_enable()) {
     return 1;
   }
 
-  if (StaticPrefs::MediaForcestereoEnabled()) {
+  if (StaticPrefs::media_forcestereo_enabled()) {
     return 2;
   }
 
   return info.mChannels;
 }
 
 bool IsDefaultPlaybackDeviceMono() {
   return CubebUtils::MaxNumberOfChannels() == 1;
--- a/dom/media/eme/MediaKeySystemAccess.cpp
+++ b/dom/media/eme/MediaKeySystemAccess.cpp
@@ -107,17 +107,18 @@ static MediaKeySystemStatus EnsureCDMIns
   }
 
   return MediaKeySystemStatus::Available;
 }
 
 /* static */
 MediaKeySystemStatus MediaKeySystemAccess::GetKeySystemStatus(
     const nsAString& aKeySystem, nsACString& aOutMessage) {
-  MOZ_ASSERT(StaticPrefs::MediaEmeEnabled() || IsClearkeyKeySystem(aKeySystem));
+  MOZ_ASSERT(StaticPrefs::media_eme_enabled() ||
+             IsClearkeyKeySystem(aKeySystem));
 
   if (IsClearkeyKeySystem(aKeySystem)) {
     return EnsureCDMInstalled(aKeySystem, aOutMessage);
   }
 
   if (IsWidevineKeySystem(aKeySystem)) {
     if (Preferences::GetBool("media.gmp-widevinecdm.visible", false)) {
       if (!Preferences::GetBool("media.gmp-widevinecdm.enabled", false)) {
@@ -250,17 +251,17 @@ static nsTArray<KeySystemConfig> GetSupp
       clearkey.mInitDataTypes.AppendElement(NS_LITERAL_STRING("cenc"));
       clearkey.mInitDataTypes.AppendElement(NS_LITERAL_STRING("keyids"));
       clearkey.mInitDataTypes.AppendElement(NS_LITERAL_STRING("webm"));
       clearkey.mPersistentState = KeySystemFeatureSupport::Requestable;
       clearkey.mDistinctiveIdentifier = KeySystemFeatureSupport::Prohibited;
       clearkey.mSessionTypes.AppendElement(MediaKeySessionType::Temporary);
       clearkey.mEncryptionSchemes.AppendElement(NS_LITERAL_STRING("cenc"));
       // We do not have support for cbcs in clearkey yet. See bug 1516673.
-      if (StaticPrefs::MediaClearkeyPersistentLicenseEnabled()) {
+      if (StaticPrefs::media_clearkey_persistent_license_enabled()) {
         clearkey.mSessionTypes.AppendElement(
             MediaKeySessionType::Persistent_license);
       }
 #if defined(XP_WIN)
       // Clearkey CDM uses WMF's H.264 decoder on Windows.
       if (WMFDecoderModule::HasH264()) {
         clearkey.mMP4.SetCanDecryptAndDecode(EME_CODEC_H264);
       } else {
--- a/dom/media/eme/MediaKeySystemAccessManager.cpp
+++ b/dom/media/eme/MediaKeySystemAccessManager.cpp
@@ -96,17 +96,17 @@ void MediaKeySystemAccessManager::Reques
     // supported.
     aPromise->MaybeReject(NS_ERROR_DOM_NOT_SUPPORTED_ERR,
                           NS_LITERAL_CSTRING("Key system is unsupported"));
     diagnostics.StoreMediaKeySystemAccess(mWindow->GetExtantDoc(), aKeySystem,
                                           false, __func__);
     return;
   }
 
-  if (!StaticPrefs::MediaEmeEnabled() && !IsClearkeyKeySystem(aKeySystem)) {
+  if (!StaticPrefs::media_eme_enabled() && !IsClearkeyKeySystem(aKeySystem)) {
     // EME disabled by user, send notification to chrome so UI can inform user.
     // Clearkey is allowed even when EME is disabled because we want the pref
     // "media.eme.enabled" only taking effect on proprietary DRMs.
     MediaKeySystemAccess::NotifyObservers(mWindow, aKeySystem,
                                           MediaKeySystemStatus::Api_disabled);
     aPromise->MaybeReject(NS_ERROR_DOM_NOT_SUPPORTED_ERR,
                           NS_LITERAL_CSTRING("EME has been preffed off"));
     diagnostics.StoreMediaKeySystemAccess(mWindow->GetExtantDoc(), aKeySystem,
--- a/dom/media/encoder/MediaEncoder.cpp
+++ b/dom/media/encoder/MediaEncoder.cpp
@@ -985,17 +985,17 @@ void MediaEncoder::Stop() {
 
   if (mVideoTrack) {
     RemoveMediaStreamTrack(mVideoTrack);
   }
 }
 
 #ifdef MOZ_WEBM_ENCODER
 bool MediaEncoder::IsWebMEncoderEnabled() {
-  return StaticPrefs::MediaEncoderWebMEnabled();
+  return StaticPrefs::media_encoder_webm_enabled();
 }
 #endif
 
 void MediaEncoder::NotifyInitialized() {
   MOZ_ASSERT(mEncoderThread->IsCurrentThreadIn());
 
   if (mInitialized) {
     // This could happen if an encoder re-inits due to a resolution change.
--- a/dom/media/flac/FlacDecoder.cpp
+++ b/dom/media/flac/FlacDecoder.cpp
@@ -8,17 +8,17 @@
 #include "MediaContainerType.h"
 #include "mozilla/StaticPrefs.h"
 
 namespace mozilla {
 
 /* static */
 bool FlacDecoder::IsEnabled() {
 #ifdef MOZ_FFVPX
-  return StaticPrefs::MediaFlacEnabled();
+  return StaticPrefs::media_flac_enabled();
 #else
   // Until bug 1295886 is fixed.
   return false;
 #endif
 }
 
 /* static */
 bool FlacDecoder::IsSupportedType(const MediaContainerType& aContainerType) {
--- a/dom/media/gmp/ChromiumCDMParent.cpp
+++ b/dom/media/gmp/ChromiumCDMParent.cpp
@@ -27,17 +27,17 @@ namespace mozilla {
 namespace gmp {
 
 using namespace eme;
 
 ChromiumCDMParent::ChromiumCDMParent(GMPContentParent* aContentParent,
                                      uint32_t aPluginId)
     : mPluginId(aPluginId),
       mContentParent(aContentParent),
-      mVideoShmemLimit(StaticPrefs::MediaEmeChromiumApiVideoShmems()) {
+      mVideoShmemLimit(StaticPrefs::media_eme_chromium_api_video_shmems()) {
   GMP_LOG(
       "ChromiumCDMParent::ChromiumCDMParent(this=%p, contentParent=%p, id=%u)",
       this, aContentParent, aPluginId);
 }
 
 RefPtr<ChromiumCDMParent::InitPromise> ChromiumCDMParent::Init(
     ChromiumCDMCallback* aCDMCallback, bool aAllowDistinctiveIdentifier,
     bool aAllowPersistentState, nsIEventTarget* aMainThread) {
--- a/dom/media/gmp/GMPServiceParent.cpp
+++ b/dom/media/gmp/GMPServiceParent.cpp
@@ -768,17 +768,17 @@ already_AddRefed<GMPParent> GeckoMediaPl
   }
 
   return nullptr;
 }
 
 RefPtr<GMPParent> CreateGMPParent(AbstractThread* aMainThread) {
 #if defined(XP_LINUX) && defined(MOZ_SANDBOX)
   if (!SandboxInfo::Get().CanSandboxMedia()) {
-    if (!StaticPrefs::MediaGmpInsecureAllow()) {
+    if (!StaticPrefs::media_gmp_insecure_allow()) {
       NS_WARNING("Denying media plugin load due to lack of sandboxing.");
       return nullptr;
     }
     NS_WARNING("Loading media plugin despite lack of sandboxing.");
   }
 #endif
   return new GMPParent(aMainThread);
 }
--- a/dom/media/hls/HLSDecoder.cpp
+++ b/dom/media/hls/HLSDecoder.cpp
@@ -91,17 +91,17 @@ void HLSResourceCallbacksSupport::OnErro
 }
 
 size_t HLSDecoder::sAllocatedInstances = 0;
 
 // static
 RefPtr<HLSDecoder> HLSDecoder::Create(MediaDecoderInit& aInit) {
   MOZ_ASSERT(NS_IsMainThread());
 
-  return sAllocatedInstances < StaticPrefs::MediaHlsMaxAllocations()
+  return sAllocatedInstances < StaticPrefs::media_hls_max_allocations()
              ? new HLSDecoder(aInit)
              : nullptr;
 }
 
 HLSDecoder::HLSDecoder(MediaDecoderInit& aInit) : MediaDecoder(aInit) {
   MOZ_ASSERT(NS_IsMainThread());
   sAllocatedInstances++;
   HLS_DEBUG("HLSDecoder", "HLSDecoder(): allocated=%zu", sAllocatedInstances);
@@ -125,17 +125,17 @@ MediaDecoderStateMachine* HLSDecoder::Cr
   init.mMediaDecoderOwnerID = mOwner;
   mReader = new MediaFormatReader(
       init, new HLSDemuxer(mHLSResourceWrapper->GetPlayerId()));
 
   return new MediaDecoderStateMachine(this, mReader);
 }
 
 bool HLSDecoder::IsEnabled() {
-  return StaticPrefs::MediaHlsEnabled() && (jni::GetAPIVersion() >= 16);
+  return StaticPrefs::media_hls_enabled() && (jni::GetAPIVersion() >= 16);
 }
 
 bool HLSDecoder::IsSupportedType(const MediaContainerType& aContainerType) {
   return IsEnabled() && DecoderTraits::IsHttpLiveStreamingType(aContainerType);
 }
 
 nsresult HLSDecoder::Load(nsIChannel* aChannel) {
   MOZ_ASSERT(NS_IsMainThread());
--- a/dom/media/ipc/GpuDecoderModule.cpp
+++ b/dom/media/ipc/GpuDecoderModule.cpp
@@ -40,17 +40,17 @@ bool GpuDecoderModule::Supports(const Tr
 static inline bool IsRemoteAcceleratedCompositor(KnowsCompositor* aKnows) {
   TextureFactoryIdentifier ident = aKnows->GetTextureFactoryIdentifier();
   return ident.mParentBackend != LayersBackend::LAYERS_BASIC &&
          ident.mParentProcessType == GeckoProcessType_GPU;
 }
 
 already_AddRefed<MediaDataDecoder> GpuDecoderModule::CreateVideoDecoder(
     const CreateDecoderParams& aParams) {
-  if (!StaticPrefs::MediaGpuProcessDecoder() || !aParams.mKnowsCompositor ||
+  if (!StaticPrefs::media_gpu_process_decoder() || !aParams.mKnowsCompositor ||
       !IsRemoteAcceleratedCompositor(aParams.mKnowsCompositor)) {
     return mWrapped->CreateVideoDecoder(aParams);
   }
 
   RefPtr<GpuRemoteVideoDecoderChild> child = new GpuRemoteVideoDecoderChild();
   SynchronousTask task("InitIPDL");
   MediaResult result(NS_OK);
   RemoteDecoderManagerChild::GetManagerThread()->Dispatch(
--- a/dom/media/ipc/RDDProcessHost.cpp
+++ b/dom/media/ipc/RDDProcessHost.cpp
@@ -73,17 +73,17 @@ bool RDDProcessHost::Launch(StringVector
   return true;
 }
 
 bool RDDProcessHost::WaitForLaunch() {
   if (mLaunchPhase == LaunchPhase::Complete) {
     return !!mRDDChild;
   }
 
-  int32_t timeoutMs = StaticPrefs::MediaRddProcessStartupTimeoutMs();
+  int32_t timeoutMs = StaticPrefs::media_rdd_process_startup_timeout_ms();
 
   // If one of the following environment variables are set we can
   // effectively ignore the timeout - as we can guarantee the RDD
   // process will be terminated
   if (PR_GetEnv("MOZ_DEBUG_CHILD_PROCESS") ||
       PR_GetEnv("MOZ_DEBUG_CHILD_PAUSE")) {
     timeoutMs = 0;
   }
--- a/dom/media/ipc/RDDProcessManager.cpp
+++ b/dom/media/ipc/RDDProcessManager.cpp
@@ -237,17 +237,17 @@ void RDDProcessManager::DestroyProcess()
   CrashReporter::AnnotateCrashReport(
       CrashReporter::Annotation::RDDProcessStatus,
       NS_LITERAL_CSTRING("Destroyed"));
 }
 
 bool RDDProcessManager::CreateContentBridge(
     base::ProcessId aOtherProcess,
     ipc::Endpoint<PRemoteDecoderManagerChild>* aOutRemoteDecoderManager) {
-  if (!EnsureRDDReady() || !StaticPrefs::MediaRddProcessEnabled()) {
+  if (!EnsureRDDReady() || !StaticPrefs::media_rdd_process_enabled()) {
     return false;
   }
 
   ipc::Endpoint<PRemoteDecoderManagerParent> parentPipe;
   ipc::Endpoint<PRemoteDecoderManagerChild> childPipe;
 
   nsresult rv = PRemoteDecoderManager::CreateEndpoints(
       mRDDChild->OtherPid(), aOtherProcess, &parentPipe, &childPipe);
--- a/dom/media/ipc/RemoteDecoderModule.cpp
+++ b/dom/media/ipc/RemoteDecoderModule.cpp
@@ -30,21 +30,21 @@ using namespace layers;
 RemoteDecoderModule::RemoteDecoderModule()
     : mManagerThread(RemoteDecoderManagerChild::GetManagerThread()) {}
 
 bool RemoteDecoderModule::SupportsMimeType(
     const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const {
   bool supports = false;
 
 #ifdef MOZ_AV1
-  if (StaticPrefs::MediaAv1Enabled()) {
+  if (StaticPrefs::media_av1_enabled()) {
     supports |= AOMDecoder::IsAV1(aMimeType);
   }
 #endif
-  if (StaticPrefs::MediaRddVorbisEnabled()) {
+  if (StaticPrefs::media_rdd_vorbis_enabled()) {
     supports |= VorbisDataDecoder::IsVorbis(aMimeType);
   }
 
   MOZ_LOG(
       sPDMLog, LogLevel::Debug,
       ("Sandbox decoder %s requested type", supports ? "supports" : "rejects"));
   return supports;
 }
--- a/dom/media/ipc/RemoteVideoDecoder.cpp
+++ b/dom/media/ipc/RemoteVideoDecoder.cpp
@@ -311,17 +311,17 @@ RemoteVideoDecoderParent::RemoteVideoDec
     MOZ_ASSERT(false,
                "Can't use RemoteVideoDecoder in the GPU process on non-Windows "
                "platforms yet");
 #endif
   }
 
 #ifdef MOZ_AV1
   if (AOMDecoder::IsAV1(params.mConfig.mMimeType)) {
-    if (StaticPrefs::MediaAv1UseDav1d()) {
+    if (StaticPrefs::media_av1_use_dav1d()) {
       mDecoder = new DAV1DDecoder(params);
     } else {
       mDecoder = new AOMDecoder(params);
     }
   }
 #endif
 
   if (NS_FAILED(error)) {
--- a/dom/media/mediacapabilities/MediaCapabilities.cpp
+++ b/dom/media/mediacapabilities/MediaCapabilities.cpp
@@ -529,17 +529,17 @@ already_AddRefed<layers::KnowsCompositor
   RefPtr<layers::KnowsCompositor> knows = layerManager->AsKnowsCompositor();
   if (NS_WARN_IF(!knows)) {
     return nullptr;
   }
   return knows->GetForMedia().forget();
 }
 
 bool MediaCapabilities::Enabled(JSContext* aCx, JSObject* aGlobal) {
-  return StaticPrefs::MediaCapabilitiesEnabled();
+  return StaticPrefs::media_media_capabilities_enabled();
 }
 
 JSObject* MediaCapabilities::WrapObject(JSContext* aCx,
                                         JS::Handle<JSObject*> aGivenProto) {
   return MediaCapabilities_Binding::Wrap(aCx, this, aGivenProto);
 }
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(MediaCapabilities)
--- a/dom/media/mediasink/AudioSink.cpp
+++ b/dom/media/mediasink/AudioSink.cpp
@@ -42,17 +42,17 @@ AudioSink::AudioSink(AbstractThread* aTh
       mWritten(0),
       mErrored(false),
       mPlaybackComplete(false),
       mOwnerThread(aThread),
       mProcessedQueueLength(0),
       mFramesParsed(0),
       mIsAudioDataAudible(false),
       mAudioQueue(aAudioQueue) {
-  bool resampling = StaticPrefs::MediaResamplingEnabled();
+  bool resampling = StaticPrefs::media_resampling_enabled();
 
   if (resampling) {
     mOutputRate = 48000;
   } else if (mInfo.mRate == 44100 || mInfo.mRate == 48000) {
     // The original rate is of good quality and we want to minimize unecessary
     // resampling. The common scenario being that the sampling rate is one or
     // the other, this allows to minimize audio quality regression and hoping
     // content provider want change from those rates mid-stream.
@@ -176,17 +176,17 @@ nsresult AudioSink::InitializeAudioStrea
   // When AudioQueue is empty, there is no way to know the channel layout of
   // the coming audio data, so we use the predefined channel map instead.
   AudioConfig::ChannelLayout::ChannelMap channelMap =
       mConverter ? mConverter->OutputConfig().Layout().Map()
                  : AudioConfig::ChannelLayout(mOutputChannels).Map();
   // The layout map used here is already processed by mConverter with
   // mOutputChannels into SMPTE format, so there is no need to worry if
   // StaticPrefs::accessibility_monoaudio_enable() or
-  // StaticPrefs::MediaForcestereoEnabled() is applied.
+  // StaticPrefs::media_forcestereo_enabled() is applied.
   nsresult rv = mAudioStream->Init(mOutputChannels, channelMap, mOutputRate,
                                    aParams.mSink);
   if (NS_FAILED(rv)) {
     mAudioStream->Shutdown();
     mAudioStream = nullptr;
     return rv;
   }
 
--- a/dom/media/mediasink/VideoSink.cpp
+++ b/dom/media/mediasink/VideoSink.cpp
@@ -99,17 +99,17 @@ VideoSink::VideoSink(AbstractThread* aTh
       mProducerID(ImageContainer::AllocateProducerID()),
       mFrameStats(aFrameStats),
       mOldCompositorDroppedCount(mContainer ? mContainer->GetDroppedImageCount()
                                             : 0),
       mPendingDroppedCount(0),
       mHasVideo(false),
       mUpdateScheduler(aThread),
       mVideoQueueSendToCompositorSize(aVQueueSentToCompositerSize),
-      mMinVideoQueueSize(StaticPrefs::MediaRuinAvSyncEnabled() ? 1 : 0)
+      mMinVideoQueueSize(StaticPrefs::media_ruin_av_sync_enabled() ? 1 : 0)
 #ifdef XP_WIN
       ,
       mHiResTimersRequested(false)
 #endif
 {
   MOZ_ASSERT(mAudioSink, "AudioSink should exist.");
 
   if (StaticPrefs::browser_measurement_render_anims_and_video_solid() &&
--- a/dom/media/mediasource/MediaSource.cpp
+++ b/dom/media/mediasource/MediaSource.cpp
@@ -112,21 +112,21 @@ nsresult MediaSource::IsTypeSupported(co
       mimeType == MEDIAMIMETYPE("audio/mp4")) {
     if (!Preferences::GetBool("media.mediasource.mp4.enabled", false)) {
       return NS_ERROR_DOM_NOT_SUPPORTED_ERR;
     }
     return NS_OK;
   }
   if (mimeType == MEDIAMIMETYPE("video/webm")) {
     if (!(Preferences::GetBool("media.mediasource.webm.enabled", false) ||
-          StaticPrefs::MediaCapabilitiesEnabled() ||
+          StaticPrefs::media_media_capabilities_enabled() ||
           containerType->ExtendedType().Codecs().Contains(
               NS_LITERAL_STRING("vp8")) ||
 #ifdef MOZ_AV1
-          (StaticPrefs::MediaAv1Enabled() &&
+          (StaticPrefs::media_av1_enabled() &&
            IsAV1CodecString(
                containerType->ExtendedType().Codecs().AsString())) ||
 #endif
           IsWebMForced(aDiagnostics))) {
       return NS_ERROR_DOM_NOT_SUPPORTED_ERR;
     }
     return NS_OK;
   }
--- a/dom/media/mediasource/TrackBuffersManager.cpp
+++ b/dom/media/mediasource/TrackBuffersManager.cpp
@@ -929,17 +929,17 @@ void TrackBuffersManager::ResetDemuxingS
              &TrackBuffersManager::OnDemuxerInitFailed)
       ->Track(mDemuxerInitRequest);
 }
 
 void TrackBuffersManager::OnDemuxerResetDone(const MediaResult& aResult) {
   MOZ_ASSERT(OnTaskQueue());
   mDemuxerInitRequest.Complete();
 
-  if (NS_FAILED(aResult) && StaticPrefs::MediaPlaybackWarningsAsErrors()) {
+  if (NS_FAILED(aResult) && StaticPrefs::media_playback_warnings_as_errors()) {
     RejectAppend(aResult, __func__);
     return;
   }
 
   // mInputDemuxer shouldn't have been destroyed while a demuxer init/reset
   // request was being processed. See bug 1239983.
   MOZ_DIAGNOSTIC_ASSERT(mInputDemuxer);
 
@@ -1026,17 +1026,17 @@ void TrackBuffersManager::Initialization
 }
 
 void TrackBuffersManager::OnDemuxerInitDone(const MediaResult& aResult) {
   MOZ_ASSERT(OnTaskQueue());
   MOZ_DIAGNOSTIC_ASSERT(mInputDemuxer, "mInputDemuxer has been destroyed");
 
   mDemuxerInitRequest.Complete();
 
-  if (NS_FAILED(aResult) && StaticPrefs::MediaPlaybackWarningsAsErrors()) {
+  if (NS_FAILED(aResult) && StaticPrefs::media_playback_warnings_as_errors()) {
     RejectAppend(aResult, __func__);
     return;
   }
 
   MediaInfo info;
 
   uint32_t numVideos = mInputDemuxer->GetNumberTracks(TrackInfo::kVideoTrack);
   if (numVideos) {
--- a/dom/media/mp4/MP4Decoder.cpp
+++ b/dom/media/mp4/MP4Decoder.cpp
@@ -187,17 +187,17 @@ bool MP4Decoder::IsH264(const nsACString
 }
 
 /* static */
 bool MP4Decoder::IsAAC(const nsACString& aMimeType) {
   return aMimeType.EqualsLiteral("audio/mp4a-latm");
 }
 
 /* static */
-bool MP4Decoder::IsEnabled() { return StaticPrefs::MediaMp4Enabled(); }
+bool MP4Decoder::IsEnabled() { return StaticPrefs::media_mp4_enabled(); }
 
 /* static */
 nsTArray<UniquePtr<TrackInfo>> MP4Decoder::GetTracksInfo(
     const MediaContainerType& aType) {
   MediaResult rv = NS_OK;
   return GetTracksInfo(aType, rv);
 }
 
--- a/dom/media/mp4/MP4Demuxer.cpp
+++ b/dom/media/mp4/MP4Demuxer.cpp
@@ -145,30 +145,30 @@ RefPtr<MP4Demuxer::InitPromise> MP4Demux
   nsresult rv = metadata.Parse();
   if (NS_FAILED(rv)) {
     return InitPromise::CreateAndReject(
         MediaResult(rv, RESULT_DETAIL("Parse MP4 metadata failed")), __func__);
   }
 
   auto audioTrackCount = metadata.GetNumberTracks(TrackInfo::kAudioTrack);
   if (audioTrackCount.Ref() == MP4Metadata::NumberTracksError()) {
-    if (StaticPrefs::MediaPlaybackWarningsAsErrors()) {
+    if (StaticPrefs::media_playback_warnings_as_errors()) {
       return InitPromise::CreateAndReject(
           MediaResult(
               NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
               RESULT_DETAIL("Invalid audio track (%s)",
                             audioTrackCount.Result().Description().get())),
           __func__);
     }
     audioTrackCount.Ref() = 0;
   }
 
   auto videoTrackCount = metadata.GetNumberTracks(TrackInfo::kVideoTrack);
   if (videoTrackCount.Ref() == MP4Metadata::NumberTracksError()) {
-    if (StaticPrefs::MediaPlaybackWarningsAsErrors()) {
+    if (StaticPrefs::media_playback_warnings_as_errors()) {
       return InitPromise::CreateAndReject(
           MediaResult(
               NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
               RESULT_DETAIL("Invalid video track (%s)",
                             videoTrackCount.Result().Description().get())),
           __func__);
     }
     videoTrackCount.Ref() = 0;
@@ -191,17 +191,17 @@ RefPtr<MP4Demuxer::InitPromise> MP4Demux
     result = std::move(videoTrackCount.Result());
   }
 
   if (audioTrackCount.Ref() != 0) {
     for (size_t i = 0; i < audioTrackCount.Ref(); i++) {
       MP4Metadata::ResultAndTrackInfo info =
           metadata.GetTrackInfo(TrackInfo::kAudioTrack, i);
       if (!info.Ref()) {
-        if (StaticPrefs::MediaPlaybackWarningsAsErrors()) {
+        if (StaticPrefs::media_playback_warnings_as_errors()) {
           return InitPromise::CreateAndReject(
               MediaResult(NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
                           RESULT_DETAIL("Invalid MP4 audio track (%s)",
                                         info.Result().Description().get())),
               __func__);
         }
         if (result == NS_OK) {
           result =
@@ -228,17 +228,17 @@ RefPtr<MP4Demuxer::InitPromise> MP4Demux
     }
   }
 
   if (videoTrackCount.Ref() != 0) {
     for (size_t i = 0; i < videoTrackCount.Ref(); i++) {
       MP4Metadata::ResultAndTrackInfo info =
           metadata.GetTrackInfo(TrackInfo::kVideoTrack, i);
       if (!info.Ref()) {
-        if (StaticPrefs::MediaPlaybackWarningsAsErrors()) {
+        if (StaticPrefs::media_playback_warnings_as_errors()) {
           return InitPromise::CreateAndReject(
               MediaResult(NS_ERROR_DOM_MEDIA_DEMUXER_ERR,
                           RESULT_DETAIL("Invalid MP4 video track (%s)",
                                         info.Result().Description().get())),
               __func__);
         }
         if (result == NS_OK) {
           result =
--- a/dom/media/ogg/OggDecoder.cpp
+++ b/dom/media/ogg/OggDecoder.cpp
@@ -9,17 +9,17 @@
 #include "MediaDecoder.h"
 #include "mozilla/StaticPrefs.h"
 #include "nsMimeTypes.h"
 
 namespace mozilla {
 
 /* static */
 bool OggDecoder::IsSupportedType(const MediaContainerType& aContainerType) {
-  if (!StaticPrefs::MediaOggEnabled()) {
+  if (!StaticPrefs::media_ogg_enabled()) {
     return false;
   }
 
   if (aContainerType.Type() != MEDIAMIMETYPE(AUDIO_OGG) &&
       aContainerType.Type() != MEDIAMIMETYPE(VIDEO_OGG) &&
       aContainerType.Type() != MEDIAMIMETYPE("application/ogg")) {
     return false;
   }
--- a/dom/media/platforms/PDMFactory.cpp
+++ b/dom/media/platforms/PDMFactory.cpp
@@ -329,75 +329,77 @@ bool PDMFactory::Supports(const TrackInf
   }
   RefPtr<PlatformDecoderModule> current = GetDecoder(aTrackInfo, aDiagnostics);
   return !!current;
 }
 
 void PDMFactory::CreatePDMs() {
   RefPtr<PlatformDecoderModule> m;
 
-  if (StaticPrefs::MediaUseBlankDecoder()) {
+  if (StaticPrefs::media_use_blank_decoder()) {
     m = CreateBlankDecoderModule();
     StartupPDM(m);
     // The Blank PDM SupportsMimeType reports true for all codecs; the creation
     // of its decoder is infallible. As such it will be used for all media, we
     // can stop creating more PDM from this point.
     return;
   }
 
-  if (StaticPrefs::MediaRddProcessEnabled() && BrowserTabsRemoteAutostart()) {
+  if (StaticPrefs::media_rdd_process_enabled() &&
+      BrowserTabsRemoteAutostart()) {
     m = new RemoteDecoderModule;
     StartupPDM(m);
   }
 
 #ifdef XP_WIN
-  if (StaticPrefs::MediaWmfEnabled() && !IsWin7AndPre2000Compatible()) {
+  if (StaticPrefs::media_wmf_enabled() && !IsWin7AndPre2000Compatible()) {
     m = new WMFDecoderModule();
     RefPtr<PlatformDecoderModule> remote = new GpuDecoderModule(m);
     StartupPDM(remote);
     mWMFFailedToLoad = !StartupPDM(m);
   } else {
-    mWMFFailedToLoad = StaticPrefs::MediaDecoderDoctorWmfDisabledIsFailure();
+    mWMFFailedToLoad =
+        StaticPrefs::media_decoder_doctor_wmf_disabled_is_failure();
   }
 #endif
 #ifdef MOZ_OMX
-  if (StaticPrefs::MediaOmxEnabled()) {
+  if (StaticPrefs::media_omx_enabled()) {
     m = OmxDecoderModule::Create();
     StartupPDM(m);
   }
 #endif
 #ifdef MOZ_FFVPX
-  if (StaticPrefs::MediaFfvpxEnabled()) {
+  if (StaticPrefs::media_ffvpx_enabled()) {
     m = FFVPXRuntimeLinker::CreateDecoderModule();
     StartupPDM(m);
   }
 #endif
 #ifdef MOZ_FFMPEG
-  if (StaticPrefs::MediaFfmpegEnabled()) {
+  if (StaticPrefs::media_ffmpeg_enabled()) {
     m = FFmpegRuntimeLinker::CreateDecoderModule();
     mFFmpegFailedToLoad = !StartupPDM(m);
   } else {
     mFFmpegFailedToLoad = false;
   }
 #endif
 #ifdef MOZ_APPLEMEDIA
   m = new AppleDecoderModule();
   StartupPDM(m);
 #endif
 #ifdef MOZ_WIDGET_ANDROID
-  if (StaticPrefs::MediaAndroidMediaCodecEnabled()) {
+  if (StaticPrefs::media_android_media_codec_enabled()) {
     m = new AndroidDecoderModule();
-    StartupPDM(m, StaticPrefs::MediaAndroidMediaCodecPreferred());
+    StartupPDM(m, StaticPrefs::media_android_media_codec_preferred());
   }
 #endif
 
   m = new AgnosticDecoderModule();
   StartupPDM(m);
 
-  if (StaticPrefs::MediaGmpDecoderEnabled()) {
+  if (StaticPrefs::media_gmp_decoder_enabled()) {
     m = new GMPDecoderModule();
     mGMPPDMFailedToStartup = !StartupPDM(m);
   } else {
     mGMPPDMFailedToStartup = false;
   }
 }
 
 void PDMFactory::CreateNullPDM() {
--- a/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp
+++ b/dom/media/platforms/agnostic/AgnosticDecoderModule.cpp
@@ -20,26 +20,27 @@
 
 namespace mozilla {
 
 bool AgnosticDecoderModule::SupportsMimeType(
     const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const {
   bool supports =
       VPXDecoder::IsVPX(aMimeType) || OpusDataDecoder::IsOpus(aMimeType) ||
       WaveDataDecoder::IsWave(aMimeType) || TheoraDecoder::IsTheora(aMimeType);
-  if (!StaticPrefs::MediaRddVorbisEnabled() ||
-      !StaticPrefs::MediaRddProcessEnabled() || !BrowserTabsRemoteAutostart()) {
+  if (!StaticPrefs::media_rdd_vorbis_enabled() ||
+      !StaticPrefs::media_rdd_process_enabled() ||
+      !BrowserTabsRemoteAutostart()) {
     supports |= VorbisDataDecoder::IsVorbis(aMimeType);
   }
 #ifdef MOZ_AV1
   // We remove support for decoding AV1 here if RDD is enabled so that
   // decoding on the content process doesn't accidentally happen in case
   // something goes wrong with launching the RDD process.
-  if (StaticPrefs::MediaAv1Enabled() &&
-      !StaticPrefs::MediaRddProcessEnabled()) {
+  if (StaticPrefs::media_av1_enabled() &&
+      !StaticPrefs::media_rdd_process_enabled()) {
     supports |= AOMDecoder::IsAV1(aMimeType);
   }
 #endif
   MOZ_LOG(sPDMLog, LogLevel::Debug,
           ("Agnostic decoder %s requested type",
            supports ? "supports" : "rejects"));
   return supports;
 }
@@ -49,19 +50,19 @@ already_AddRefed<MediaDataDecoder> Agnos
   RefPtr<MediaDataDecoder> m;
 
   if (VPXDecoder::IsVPX(aParams.mConfig.mMimeType)) {
     m = new VPXDecoder(aParams);
   }
 #ifdef MOZ_AV1
   // see comment above about AV1 and the RDD process
   else if (AOMDecoder::IsAV1(aParams.mConfig.mMimeType) &&
-           !StaticPrefs::MediaRddProcessEnabled() &&
-           StaticPrefs::MediaAv1Enabled()) {
-    if (StaticPrefs::MediaAv1UseDav1d()) {
+           !StaticPrefs::media_rdd_process_enabled() &&
+           StaticPrefs::media_av1_enabled()) {
+    if (StaticPrefs::media_av1_use_dav1d()) {
       m = new DAV1DDecoder(aParams);
     } else {
       m = new AOMDecoder(aParams);
     }
   }
 #endif
   else if (TheoraDecoder::IsTheora(aParams.mConfig.mMimeType)) {
     m = new TheoraDecoder(aParams);
--- a/dom/media/platforms/agnostic/eme/EMEDecoderModule.cpp
+++ b/dom/media/platforms/agnostic/eme/EMEDecoderModule.cpp
@@ -378,17 +378,17 @@ static already_AddRefed<MediaDataDecoder
       new EMEMediaDataDecoderProxy(thread.forget(), aProxy, aParams));
   return decoder.forget();
 }
 
 already_AddRefed<MediaDataDecoder> EMEDecoderModule::CreateVideoDecoder(
     const CreateDecoderParams& aParams) {
   MOZ_ASSERT(aParams.mConfig.mCrypto.IsEncrypted());
 
-  if (StaticPrefs::MediaEmeVideoBlank()) {
+  if (StaticPrefs::media_eme_video_blank()) {
     EME_LOG("EMEDecoderModule::CreateVideoDecoder() creating a blank decoder.");
     RefPtr<PlatformDecoderModule> m(CreateBlankDecoderModule());
     return m->CreateVideoDecoder(aParams);
   }
 
   if (SupportsMimeType(aParams.mConfig.mMimeType, nullptr)) {
     // GMP decodes. Assume that means it can decrypt too.
     RefPtr<MediaDataDecoderProxy> wrapper =
@@ -413,17 +413,17 @@ already_AddRefed<MediaDataDecoder> EMEDe
 already_AddRefed<MediaDataDecoder> EMEDecoderModule::CreateAudioDecoder(
     const CreateDecoderParams& aParams) {
   MOZ_ASSERT(aParams.mConfig.mCrypto.IsEncrypted());
 
   // We don't support using the GMP to decode audio.
   MOZ_ASSERT(!SupportsMimeType(aParams.mConfig.mMimeType, nullptr));
   MOZ_ASSERT(mPDM);
 
-  if (StaticPrefs::MediaEmeAudioBlank()) {
+  if (StaticPrefs::media_eme_audio_blank()) {
     EME_LOG("EMEDecoderModule::CreateAudioDecoder() creating a blank decoder.");
     RefPtr<PlatformDecoderModule> m(CreateBlankDecoderModule());
     return m->CreateAudioDecoder(aParams);
   }
 
   UniquePtr<ADTSSampleConverter> converter = nullptr;
   if (MP4Decoder::IsAAC(aParams.mConfig.mMimeType)) {
     // The CDM expects encrypted AAC to be in ADTS format.
--- a/dom/media/platforms/ffmpeg/FFmpegDecoderModule.h
+++ b/dom/media/platforms/ffmpeg/FFmpegDecoderModule.h
@@ -35,17 +35,17 @@ class FFmpegDecoderModule : public Platf
     // Bug 1263836 will handle alpha scenario once implemented. It will shift
     // the check for alpha to PDMFactory but not itself remove the need for a
     // check.
     if (aParams.VideoConfig().HasAlpha()) {
       return nullptr;
     }
     if (VPXDecoder::IsVPX(aParams.mConfig.mMimeType) &&
         aParams.mOptions.contains(CreateDecoderParams::Option::LowLatency) &&
-        !StaticPrefs::MediaFfmpegLowLatencyEnabled()) {
+        !StaticPrefs::media_ffmpeg_low_latency_enabled()) {
       // We refuse to create a decoder with low latency enabled if it's VP8 or
       // VP9 unless specifically allowed: this will fallback to libvpx later.
       // We do allow it for h264.
       return nullptr;
     }
     RefPtr<MediaDataDecoder> decoder = new FFmpegVideoDecoder<V>(
         mLib, aParams.mTaskQueue, aParams.VideoConfig(),
         aParams.mKnowsCompositor, aParams.mImageContainer,
--- a/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp
+++ b/dom/media/platforms/ffmpeg/FFmpegLibWrapper.cpp
@@ -39,17 +39,17 @@ FFmpegLibWrapper::LinkResult FFmpegLibWr
     if (macro == 57) {
       // Due to current AVCodecContext binary incompatibility we can only
       // support FFmpeg 57 at this stage.
       Unlink();
       return LinkResult::CannotUseLibAV57;
     }
 #ifdef MOZ_FFMPEG
     if (version < (54u << 16 | 35u << 8 | 1u) &&
-        !StaticPrefs::MediaLibavcodecAllowObsolete()) {
+        !StaticPrefs::media_libavcodec_allow_obsolete()) {
       // Refuse any libavcodec version prior to 54.35.1.
       // (Unless media.libavcodec.allow-obsolete==true)
       Unlink();
       return LinkResult::BlockedOldLibAVVersion;
     }
 #endif
   }
 
--- a/dom/media/platforms/wmf/DXVA2Manager.cpp
+++ b/dom/media/platforms/wmf/DXVA2Manager.cpp
@@ -411,17 +411,17 @@ D3D9DXVA2Manager::Init(layers::KnowsComp
   hr = d3d9Ex->GetAdapterIdentifier(D3DADAPTER_DEFAULT, 0, &adapter);
   if (!SUCCEEDED(hr)) {
     aFailureReason = nsPrintfCString(
         "IDirect3D9Ex::GetAdapterIdentifier failed with error %X", hr);
     return hr;
   }
 
   if ((adapter.VendorId == 0x1022 || adapter.VendorId == 0x1002) &&
-      !StaticPrefs::PDMWMFSkipBlacklist()) {
+      !StaticPrefs::media_wmf_skip_blacklist()) {
     for (const auto& model : sAMDPreUVD4) {
       if (adapter.DeviceId == model) {
         mIsAMDPreUVD4 = true;
         break;
       }
     }
   }
 
@@ -501,17 +501,17 @@ static uint32_t sDXVAVideosCount = 0;
 /* static */
 DXVA2Manager* DXVA2Manager::CreateD3D9DXVA(
     layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason) {
   MOZ_ASSERT(NS_IsMainThread());
   HRESULT hr;
 
   // DXVA processing takes up a lot of GPU resources, so limit the number of
   // videos we use DXVA with at any one time.
-  uint32_t dxvaLimit = StaticPrefs::PDMWMFMaxDXVAVideos();
+  uint32_t dxvaLimit = StaticPrefs::media_wmf_dxva_max_videos();
 
   if (sDXVAVideosCount == dxvaLimit) {
     aFailureReason.AssignLiteral("Too many DXVA videos playing");
     return nullptr;
   }
 
   nsAutoPtr<D3D9DXVA2Manager> d3d9Manager(new D3D9DXVA2Manager());
   hr = d3d9Manager->Init(aKnowsCompositor, aFailureReason);
@@ -675,32 +675,32 @@ D3D11DXVA2Manager::Init(layers::KnowsCom
   if (layers::ImageBridgeChild::GetSingleton() || !aKnowsCompositor) {
     // There's no proper KnowsCompositor for ImageBridge currently (and it
     // implements the interface), so just use that if it's available.
     mTextureClientAllocator = new D3D11RecycleAllocator(
         layers::ImageBridgeChild::GetSingleton().get(), mDevice,
         gfx::SurfaceFormat::NV12);
 
     if (ImageBridgeChild::GetSingleton() &&
-        StaticPrefs::PDMWMFUseSyncTexture() &&
+        StaticPrefs::media_wmf_use_sync_texture() &&
         mDevice != DeviceManagerDx::Get()->GetCompositorDevice()) {
       // We use a syncobject to avoid the cost of the mutex lock when
       // compositing, and because it allows color conversion ocurring directly
       // from this texture DXVA does not seem to accept IDXGIKeyedMutex textures
       // as input.
       mSyncObject = layers::SyncObjectClient::CreateSyncObjectClient(
           layers::ImageBridgeChild::GetSingleton()
               ->GetTextureFactoryIdentifier()
               .mSyncHandle,
           mDevice);
     }
   } else {
     mTextureClientAllocator = new D3D11RecycleAllocator(
         aKnowsCompositor, mDevice, gfx::SurfaceFormat::NV12);
-    if (StaticPrefs::PDMWMFUseSyncTexture()) {
+    if (StaticPrefs::media_wmf_use_sync_texture()) {
       // We use a syncobject to avoid the cost of the mutex lock when
       // compositing, and because it allows color conversion ocurring directly
       // from this texture DXVA does not seem to accept IDXGIKeyedMutex textures
       // as input.
       mSyncObject = layers::SyncObjectClient::CreateSyncObjectClient(
           aKnowsCompositor->GetTextureFactoryIdentifier().mSyncHandle, mDevice);
     }
   }
@@ -844,17 +844,17 @@ D3D11DXVA2Manager::InitInternal(layers::
   hr = adapter->GetDesc(&adapterDesc);
   if (!SUCCEEDED(hr)) {
     aFailureReason =
         nsPrintfCString("IDXGIAdapter::GetDesc failed with code %X", hr);
     return hr;
   }
 
   if ((adapterDesc.VendorId == 0x1022 || adapterDesc.VendorId == 0x1002) &&
-      !StaticPrefs::PDMWMFSkipBlacklist()) {
+      !StaticPrefs::media_wmf_skip_blacklist()) {
     for (const auto& model : sAMDPreUVD4) {
       if (adapterDesc.DeviceId == model) {
         mIsAMDPreUVD4 = true;
         break;
       }
     }
   }
 
@@ -1214,17 +1214,17 @@ already_AddRefed<ID3D11VideoDecoder> D3D
 }
 
 /* static */
 DXVA2Manager* DXVA2Manager::CreateD3D11DXVA(
     layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason,
     ID3D11Device* aDevice) {
   // DXVA processing takes up a lot of GPU resources, so limit the number of
   // videos we use DXVA with at any one time.
-  uint32_t dxvaLimit = StaticPrefs::PDMWMFMaxDXVAVideos();
+  uint32_t dxvaLimit = StaticPrefs::media_wmf_dxva_max_videos();
 
   if (sDXVAVideosCount == dxvaLimit) {
     aFailureReason.AssignLiteral("Too many DXVA videos playing");
     return nullptr;
   }
 
   nsAutoPtr<D3D11DXVA2Manager> manager(new D3D11DXVA2Manager());
   HRESULT hr = manager->Init(aKnowsCompositor, aFailureReason, aDevice);
@@ -1246,17 +1246,17 @@ DXVA2Manager::~DXVA2Manager() {
 }
 
 bool DXVA2Manager::IsUnsupportedResolution(const uint32_t& aWidth,
                                            const uint32_t& aHeight,
                                            const float& aFramerate) const {
   // AMD cards with UVD3 or earlier perform poorly trying to decode 1080p60 in
   // hardware, so use software instead. Pick 45 as an arbitrary upper bound for
   // the framerate we can handle.
-  return !StaticPrefs::PDMWMFAMDHighResEnabled() && mIsAMDPreUVD4 &&
+  return !StaticPrefs::media_wmf_amd_highres_enabled() && mIsAMDPreUVD4 &&
          (aWidth >= 1920 || aHeight >= 1088) && aFramerate > 45;
 }
 
 /* static */
 bool DXVA2Manager::IsNV12Supported(uint32_t aVendorID, uint32_t aDeviceID,
                                    const nsAString& aDriverVersionString) {
   if (aVendorID == 0x1022 || aVendorID == 0x1002) {
     // AMD
--- a/dom/media/platforms/wmf/WMFDecoderModule.cpp
+++ b/dom/media/platforms/wmf/WMFDecoderModule.cpp
@@ -69,32 +69,32 @@ static bool CanCreateMFTDecoder(const GU
 /* static */
 void WMFDecoderModule::Init() {
   MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
   bool testForVPx;
   if (XRE_IsContentProcess()) {
     // If we're in the content process and the UseGPUDecoder pref is set, it
     // means that we've given up on the GPU process (it's been crashing) so we
     // should disable DXVA
-    sDXVAEnabled = !StaticPrefs::MediaGpuProcessDecoder();
+    sDXVAEnabled = !StaticPrefs::media_gpu_process_decoder();
     // We need to test for VPX in the content process as the GPUDecoderModule
     // directly calls WMFDecoderModule::Supports in the content process.
     // This unnecessary requirement will be fixed in bug 1534815.
     testForVPx = true;
   } else if (XRE_IsGPUProcess()) {
     // Always allow DXVA in the GPU process.
     testForVPx = sDXVAEnabled = true;
   } else {
     // Only allow DXVA in the UI process if we aren't in e10s Firefox
     testForVPx = sDXVAEnabled = !mozilla::BrowserTabsRemoteAutostart();
   }
 
   sDXVAEnabled = sDXVAEnabled && gfx::gfxVars::CanUseHardwareVideoDecoding();
   testForVPx = testForVPx && gfx::gfxVars::CanUseHardwareVideoDecoding();
-  if (testForVPx && StaticPrefs::MediaWmfVp9Enabled()) {
+  if (testForVPx && StaticPrefs::media_wmf_vp9_enabled()) {
     gfx::WMFVPXVideoCrashGuard guard;
     if (!guard.Crashed()) {
       sUsableVPXMFT = CanCreateMFTDecoder(CLSID_WebmMfVpxDec);
     }
   }
 }
 
 /* static */
--- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -438,21 +438,21 @@ class CreateDXVAManagerEvent : public Ru
         mBackend(LayersBackend::LAYERS_D3D11),
         mKnowsCompositor(aKnowsCompositor),
         mFailureReason(aFailureReason) {}
 
   NS_IMETHOD Run() override {
     NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
     const bool deblacklistingForTelemetry =
         XRE_IsGPUProcess() &&
-        StaticPrefs::PDMWMFDeblacklistingForTelemetryInGPUProcess();
+        StaticPrefs::media_wmf_deblacklisting_for_telemetry_in_gpu_process();
     nsACString* failureReason = &mFailureReason;
     nsCString secondFailureReason;
     if (mBackend == LayersBackend::LAYERS_D3D11 &&
-        StaticPrefs::PDMWMFAllowD3D11() && IsWin8OrLater()) {
+        StaticPrefs::media_wmf_dxva_d3d11_enabled() && IsWin8OrLater()) {
       const nsCString& blacklistedDLL = FindD3D11BlacklistedDLL();
       if (!deblacklistingForTelemetry && !blacklistedDLL.IsEmpty()) {
         failureReason->AppendPrintf("D3D11 blacklisted with DLL %s",
                                     blacklistedDLL.get());
       } else {
         mDXVA2Manager =
             DXVA2Manager::CreateD3D11DXVA(mKnowsCompositor, *failureReason);
         if (mDXVA2Manager) {
@@ -510,17 +510,18 @@ bool WMFVideoMFTManager::InitializeDXVA(
         SystemGroup::EventTargetFor(mozilla::TaskCategory::Other), event);
   }
   mDXVA2Manager = event->mDXVA2Manager;
 
   return mDXVA2Manager != nullptr;
 }
 
 MediaResult WMFVideoMFTManager::ValidateVideoInfo() {
-  if (mStreamType != H264 || StaticPrefs::PDMWMFAllowUnsupportedResolutions()) {
+  if (mStreamType != H264 ||
+      StaticPrefs::media_wmf_allow_unsupported_resolutions()) {
     return NS_OK;
   }
 
   // The WMF H.264 decoder is documented to have a minimum resolution 48x48
   // pixels for resolution, but we won't enable hw decoding for the resolution <
   // 132 pixels. It's assumed the software decoder doesn't have this limitation,
   // but it still might have maximum resolution limitation.
   // https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx
@@ -581,18 +582,18 @@ MediaResult WMFVideoMFTManager::InitInte
 
   RefPtr<IMFAttributes> attr(decoder->GetAttributes());
   UINT32 aware = 0;
   if (attr) {
     attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
     attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
                     WMFDecoderModule::GetNumDecoderThreads());
     bool lowLatency =
-        (StaticPrefs::PDMWMFLowLatencyEnabled() || IsWin10OrLater()) &&
-        !StaticPrefs::PDMWMFLowLatencyForceDisabled();
+        (StaticPrefs::media_wmf_low_latency_enabled() || IsWin10OrLater()) &&
+        !StaticPrefs::media_wmf_low_latency_force_disabled();
     if (mLowLatency || lowLatency) {
       hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
       if (SUCCEEDED(hr)) {
         LOG("Enabling Low Latency Mode");
       } else {
         LOG("Couldn't enable Low Latency Mode");
       }
     }
@@ -1145,16 +1146,16 @@ bool WMFVideoMFTManager::IsHardwareAccel
   return mDecoder && mUseHwAccel;
 }
 
 nsCString WMFVideoMFTManager::GetDescriptionName() const {
   nsCString failureReason;
   bool hw = IsHardwareAccelerated(failureReason);
   return nsPrintfCString("wmf %s video decoder - %s",
                          hw ? "hardware" : "software",
-                         hw ? StaticPrefs::PDMWMFUseNV12Format() &&
+                         hw ? StaticPrefs::media_wmf_use_nv12_format() &&
                                       gfx::DeviceManagerDx::Get()->CanUseNV12()
                                   ? "nv12"
                                   : "rgba32"
                             : "yuv420");
 }
 
 }  // namespace mozilla
--- a/dom/media/platforms/wrappers/MediaChangeMonitor.cpp
+++ b/dom/media/platforms/wrappers/MediaChangeMonitor.cpp
@@ -525,17 +525,17 @@ MediaResult MediaChangeMonitor::CreateDe
   }
   return rv;
 }
 
 bool MediaChangeMonitor::CanRecycleDecoder() const {
   AssertOnTaskQueue();
 
   MOZ_ASSERT(mDecoder);
-  return StaticPrefs::MediaDecoderRecycleEnabled() &&
+  return StaticPrefs::media_decoder_recycle_enabled() &&
          mDecoder->SupportDecoderRecycling();
 }
 
 void MediaChangeMonitor::DecodeFirstSample(MediaRawData* aSample) {
   AssertOnTaskQueue();
 
   // We feed all the data to AnnexB decoder as a non-keyframe could contain
   // the SPS/PPS when used with WebRTC and this data is needed by the decoder.
--- a/dom/media/webaudio/AudioContext.cpp
+++ b/dom/media/webaudio/AudioContext.cpp
@@ -1068,17 +1068,17 @@ void AudioContext::MaybeUpdateAutoplayTe
   }
 }
 
 void AudioContext::ReportBlocked() {
   ReportToConsole(nsIScriptError::warningFlag,
                   "BlockAutoplayWebAudioStartError");
   mWasAllowedToStart = false;
 
-  if (!StaticPrefs::MediaBlockEventEnabled()) {
+  if (!StaticPrefs::media_autoplay_block_event_enabled()) {
     return;
   }
 
   RefPtr<AudioContext> self = this;
   RefPtr<nsIRunnable> r =
       NS_NewRunnableFunction("AudioContext::AutoplayBlocked", [self]() {
         nsPIDOMWindowInner* parent = self->GetParentObject();
         if (!parent) {
--- a/dom/media/webm/WebMDecoder.cpp
+++ b/dom/media/webm/WebMDecoder.cpp
@@ -62,34 +62,34 @@ nsTArray<UniquePtr<TrackInfo>> WebMDecod
           trackInfo->GetAsVideoInfo()->mColorDepth =
               gfx::ColorDepthForBitDepth(bitDepth);
         }
         tracks.AppendElement(std::move(trackInfo));
         continue;
       }
     }
 #ifdef MOZ_AV1
-    if (StaticPrefs::MediaAv1Enabled() && IsAV1CodecString(codec)) {
+    if (StaticPrefs::media_av1_enabled() && IsAV1CodecString(codec)) {
       tracks.AppendElement(
           CreateTrackInfoWithMIMETypeAndContainerTypeExtraParameters(
               NS_LITERAL_CSTRING("video/av1"), aType));
       continue;
     }
 #endif
     // Unknown codec
     aError = MediaResult(
         NS_ERROR_DOM_MEDIA_FATAL_ERR,
         RESULT_DETAIL("Unknown codec:%s", NS_ConvertUTF16toUTF8(codec).get()));
   }
   return tracks;
 }
 
 /* static */
 bool WebMDecoder::IsSupportedType(const MediaContainerType& aContainerType) {
-  if (!StaticPrefs::MediaWebMEnabled()) {
+  if (!StaticPrefs::media_webm_enabled()) {
     return false;
   }
 
   MediaResult rv = NS_OK;
   auto tracks = GetTracksInfo(aContainerType, rv);
 
   if (NS_FAILED(rv)) {
     return false;
--- a/dom/media/webspeech/recognition/SpeechRecognition.cpp
+++ b/dom/media/webspeech/recognition/SpeechRecognition.cpp
@@ -83,17 +83,17 @@ already_AddRefed<nsISpeechRecognitionSer
         NS_LITERAL_CSTRING(DEFAULT_RECOGNITION_SERVICE_PREFIX) +
         NS_ConvertUTF16toUTF8(aLang);
   } else if (!prefValue.IsEmpty()) {
     speechRecognitionService = prefValue;
   } else {
     speechRecognitionService = DEFAULT_RECOGNITION_SERVICE;
   }
 
-  if (StaticPrefs::MediaWebspeechTextFakeRecognitionService()) {
+  if (StaticPrefs::media_webspeech_test_fake_recognition_service()) {
     speechRecognitionServiceCID =
         NS_SPEECH_RECOGNITION_SERVICE_CONTRACTID_PREFIX "fake";
   } else {
     speechRecognitionServiceCID =
         NS_LITERAL_CSTRING(NS_SPEECH_RECOGNITION_SERVICE_CONTRACTID_PREFIX) +
         speechRecognitionService;
   }
 
@@ -137,17 +137,17 @@ SpeechRecognition::SpeechRecognition(nsP
       mEndpointer(kSAMPLE_RATE),
       mAudioSamplesPerChunk(mEndpointer.FrameSize()),
       mSpeechDetectionTimer(NS_NewTimer()),
       mSpeechGrammarList(new SpeechGrammarList(GetParentObject())),
       mInterimResults(false),
       mMaxAlternatives(1) {
   SR_LOG("created SpeechRecognition");
 
-  if (StaticPrefs::MediaWebspeechTestEnable()) {
+  if (StaticPrefs::media_webspeech_test_enable()) {
     nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
     obs->AddObserver(this, SPEECH_RECOGNITION_TEST_EVENT_REQUEST_TOPIC, false);
     obs->AddObserver(this, SPEECH_RECOGNITION_TEST_END_TOPIC, false);
   }
 
   mEndpointer.set_speech_input_complete_silence_length(
       Preferences::GetInt(PREFERENCE_ENDPOINTER_SILENCE_LENGTH, 1250000));
   mEndpointer.set_long_speech_input_complete_silence_length(
@@ -189,18 +189,18 @@ bool SpeechRecognition::IsAuthorized(JSC
   if (NS_WARN_IF(NS_FAILED(rv))) {
     return false;
   }
 
   bool hasPermission =
       (speechRecognition == nsIPermissionManager::ALLOW_ACTION);
 
   return (hasPermission ||
-          StaticPrefs::MediaWebspeechRecognitionForceEnable() ||
-          StaticPrefs::MediaWebspeechTestEnable()) &&
+          StaticPrefs::media_webspeech_recognition_force_enable() ||
+          StaticPrefs::media_webspeech_test_enable()) &&
          StaticPrefs::media_webspeech_recognition_enable();
 }
 
 already_AddRefed<SpeechRecognition> SpeechRecognition::Constructor(
     const GlobalObject& aGlobal, ErrorResult& aRv) {
   nsCOMPtr<nsPIDOMWindowInner> win = do_QueryInterface(aGlobal.GetAsSupports());
   if (!win) {
     aRv.Throw(NS_ERROR_FAILURE);
@@ -603,17 +603,17 @@ SpeechRecognition::Observe(nsISupports* 
       StateBetween(STATE_IDLE, STATE_WAITING_FOR_SPEECH)) {
     DispatchError(SpeechRecognition::EVENT_AUDIO_ERROR,
                   SpeechRecognitionErrorCode::No_speech,
                   NS_LITERAL_STRING("No speech detected (timeout)"));
   } else if (!strcmp(aTopic, SPEECH_RECOGNITION_TEST_END_TOPIC)) {
     nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
     obs->RemoveObserver(this, SPEECH_RECOGNITION_TEST_EVENT_REQUEST_TOPIC);
     obs->RemoveObserver(this, SPEECH_RECOGNITION_TEST_END_TOPIC);
-  } else if (StaticPrefs::MediaWebspeechTextFakeFsmEvents() &&
+  } else if (StaticPrefs::media_webspeech_test_fake_fsm_events() &&
              !strcmp(aTopic, SPEECH_RECOGNITION_TEST_EVENT_REQUEST_TOPIC)) {
     ProcessTestEventRequest(aSubject, nsDependentString(aData));
   }
 
   return NS_OK;
 }
 
 void SpeechRecognition::ProcessTestEventRequest(nsISupports* aSubject,
@@ -621,17 +621,17 @@ void SpeechRecognition::ProcessTestEvent
   if (aEventName.EqualsLiteral("EVENT_ABORT")) {
     Abort();
   } else if (aEventName.EqualsLiteral("EVENT_AUDIO_ERROR")) {
     DispatchError(
         SpeechRecognition::EVENT_AUDIO_ERROR,
         SpeechRecognitionErrorCode::Audio_capture,  // TODO different codes?
         NS_LITERAL_STRING("AUDIO_ERROR test event"));
   } else {
-    NS_ASSERTION(StaticPrefs::MediaWebspeechTextFakeRecognitionService(),
+    NS_ASSERTION(StaticPrefs::media_webspeech_test_fake_recognition_service(),
                  "Got request for fake recognition service event, but "
                  "media.webspeech.test.fake_recognition_service is unset");
 
     // let the fake recognition service handle the request
   }
 }
 
 already_AddRefed<SpeechGrammarList> SpeechRecognition::Grammars() const {
--- a/dom/media/webspeech/recognition/test/FakeSpeechRecognitionService.cpp
+++ b/dom/media/webspeech/recognition/test/FakeSpeechRecognitionService.cpp
@@ -53,17 +53,17 @@ FakeSpeechRecognitionService::ValidateAn
 }
 
 NS_IMETHODIMP
 FakeSpeechRecognitionService::Abort() { return NS_OK; }
 
 NS_IMETHODIMP
 FakeSpeechRecognitionService::Observe(nsISupports* aSubject, const char* aTopic,
                                       const char16_t* aData) {
-  MOZ_ASSERT(StaticPrefs::MediaWebspeechTextFakeRecognitionService(),
+  MOZ_ASSERT(StaticPrefs::media_webspeech_test_fake_recognition_service(),
              "Got request to fake recognition service event, but "
              "media.webspeech.test.fake_recognition_service is not set");
 
   if (!strcmp(aTopic, SPEECH_RECOGNITION_TEST_END_TOPIC)) {
     nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
     obs->RemoveObserver(this, SPEECH_RECOGNITION_TEST_EVENT_REQUEST_TOPIC);
     obs->RemoveObserver(this, SPEECH_RECOGNITION_TEST_END_TOPIC);
 
--- a/dom/media/webspeech/synth/nsSynthVoiceRegistry.cpp
+++ b/dom/media/webspeech/synth/nsSynthVoiceRegistry.cpp
@@ -318,17 +318,17 @@ nsSynthVoiceRegistry::RemoveVoice(nsISpe
     return NS_ERROR_INVALID_ARG;
   }
 
   mVoices.RemoveElement(retval);
   mDefaultVoices.RemoveElement(retval);
   mUriVoiceMap.Remove(aUri);
 
   if (retval->mIsQueued &&
-      !StaticPrefs::MediaWebspeechSynthForceGlobalQueue()) {
+      !StaticPrefs::media_webspeech_synth_force_global_queue()) {
     // Check if this is the last queued voice, and disable the global queue if
     // it is.
     bool queued = false;
     for (uint32_t i = 0; i < mVoices.Length(); i++) {
       VoiceData* voice = mVoices[i];
       if (voice->mIsQueued) {
         queued = true;
         break;
@@ -658,17 +658,18 @@ void nsSynthVoiceRegistry::Speak(const n
   if (!voice) {
     NS_WARNING("No voices found.");
     aTask->ForceError(0, 0);
     return;
   }
 
   aTask->SetChosenVoiceURI(voice->mUri);
 
-  if (mUseGlobalQueue || StaticPrefs::MediaWebspeechSynthForceGlobalQueue()) {
+  if (mUseGlobalQueue ||
+      StaticPrefs::media_webspeech_synth_force_global_queue()) {
     LOG(LogLevel::Debug,
         ("nsSynthVoiceRegistry::Speak queueing text='%s' lang='%s' uri='%s' "
          "rate=%f pitch=%f",
          NS_ConvertUTF16toUTF8(aText).get(), NS_ConvertUTF16toUTF8(aLang).get(),
          NS_ConvertUTF16toUTF8(aUri).get(), aRate, aPitch));
     RefPtr<GlobalQueueItem> item =
         new GlobalQueueItem(voice, aTask, aText, aVolume, aRate, aPitch);
     mGlobalQueue.AppendElement(item);
@@ -728,18 +729,18 @@ void nsSynthVoiceRegistry::ResumeQueue()
 
 bool nsSynthVoiceRegistry::IsSpeaking() { return mIsSpeaking; }
 
 void nsSynthVoiceRegistry::SetIsSpeaking(bool aIsSpeaking) {
   MOZ_ASSERT(XRE_IsParentProcess());
 
   // Only set to 'true' if global queue is enabled.
   mIsSpeaking =
-      aIsSpeaking &&
-      (mUseGlobalQueue || StaticPrefs::MediaWebspeechSynthForceGlobalQueue());
+      aIsSpeaking && (mUseGlobalQueue ||
+                      StaticPrefs::media_webspeech_synth_force_global_queue());
 
   nsTArray<SpeechSynthesisParent*> ssplist;
   GetAllSpeechSynthActors(ssplist);
   for (uint32_t i = 0; i < ssplist.Length(); ++i) {
     Unused << ssplist[i]->SendIsSpeakingChanged(aIsSpeaking);
   }
 }
 
--- a/gfx/gl/GLLibraryEGL.cpp
+++ b/gfx/gl/GLLibraryEGL.cpp
@@ -284,24 +284,24 @@ static EGLDisplay GetAndInitDisplayForAc
   EGLDisplay ret = 0;
 
   if (wr::RenderThread::IsInRenderThread()) {
     return GetAndInitDisplayForWebRender(egl, EGL_DEFAULT_DISPLAY);
   }
 
   FeatureState& d3d11ANGLE = gfxConfig::GetFeature(Feature::D3D11_HW_ANGLE);
 
-  if (!StaticPrefs::WebGLANGLETryD3D11())
+  if (!StaticPrefs::webgl_angle_try_d3d11()) {
     d3d11ANGLE.UserDisable("User disabled D3D11 ANGLE by pref",
                            NS_LITERAL_CSTRING("FAILURE_ID_ANGLE_PREF"));
-
-  if (StaticPrefs::WebGLANGLEForceD3D11())
+  }
+  if (StaticPrefs::webgl_angle_force_d3d11()) {
     d3d11ANGLE.UserForceEnable(
         "User force-enabled D3D11 ANGLE on disabled hardware");
-
+  }
   gAngleErrorReporter.SetFailureId(out_failureId);
 
   auto guardShutdown = mozilla::MakeScopeExit([&] {
     gAngleErrorReporter.SetFailureId(nullptr);
     // NOTE: Ideally we should be calling ANGLEPlatformShutdown after the
     //       ANGLE display is destroyed. However gAngleErrorReporter
     //       will live longer than the ANGLE display so we're fine.
   });
@@ -736,17 +736,17 @@ EGLDisplay GLLibraryEGL::CreateDisplay(b
   if (IsExtensionSupported(ANGLE_platform_angle_d3d)) {
     nsCString accelAngleFailureId;
     bool accelAngleSupport =
         IsAccelAngleSupported(gfxInfo, &accelAngleFailureId);
     bool shouldTryAccel = forceAccel || accelAngleSupport;
     bool shouldTryWARP = !forceAccel;  // Only if ANGLE not supported or fails
 
     // If WARP preferred, will override ANGLE support
-    if (StaticPrefs::WebGLANGLEForceWARP()) {
+    if (StaticPrefs::webgl_angle_force_warp()) {
       shouldTryWARP = true;
       shouldTryAccel = false;
       if (accelAngleFailureId.IsEmpty()) {
         accelAngleFailureId = NS_LITERAL_CSTRING("FEATURE_FAILURE_FORCE_WARP");
       }
     }
 
     // Hardware accelerated ANGLE path (supported or force accel)
--- a/gfx/gl/GLScreenBuffer.cpp
+++ b/gfx/gl/GLScreenBuffer.cpp
@@ -61,36 +61,36 @@ UniquePtr<SurfaceFactory> GLScreenBuffer
     GLContext* gl, const SurfaceCaps& caps,
     KnowsCompositor* compositorConnection, const layers::TextureFlags& flags) {
   LayersIPCChannel* ipcChannel = compositorConnection->GetTextureForwarder();
   const layers::LayersBackend backend =
       compositorConnection->GetCompositorBackendType();
   const bool useANGLE = compositorConnection->GetCompositorUseANGLE();
 
   const bool useGl =
-      !StaticPrefs::WebGLForceLayersReadback() &&
+      !StaticPrefs::webgl_force_layers_readback() &&
       (backend == layers::LayersBackend::LAYERS_OPENGL ||
        (backend == layers::LayersBackend::LAYERS_WR && !useANGLE));
   const bool useD3D =
-      !StaticPrefs::WebGLForceLayersReadback() &&
+      !StaticPrefs::webgl_force_layers_readback() &&
       (backend == layers::LayersBackend::LAYERS_D3D11 ||
        (backend == layers::LayersBackend::LAYERS_WR && useANGLE));
 
   UniquePtr<SurfaceFactory> factory = nullptr;
   if (useGl) {
 #if defined(XP_MACOSX)
     factory = SurfaceFactory_IOSurface::Create(gl, caps, ipcChannel, flags);
 #elif defined(MOZ_X11)
     if (sGLXLibrary.UseTextureFromPixmap())
       factory = SurfaceFactory_GLXDrawable::Create(gl, caps, ipcChannel, flags);
 #elif defined(MOZ_WIDGET_UIKIT)
     factory = MakeUnique<SurfaceFactory_GLTexture>(mGLContext, caps, ipcChannel,
                                                    mFlags);
 #elif defined(MOZ_WIDGET_ANDROID)
-    if (XRE_IsParentProcess() && !StaticPrefs::WebGLSurfaceTextureEnabled()) {
+    if (XRE_IsParentProcess() && !StaticPrefs::webgl_enable_surface_texture()) {
       factory = SurfaceFactory_EGLImage::Create(gl, caps, ipcChannel, flags);
     } else {
       factory =
           SurfaceFactory_SurfaceTexture::Create(gl, caps, ipcChannel, flags);
     }
 #else
     if (gl->GetContextType() == GLContextType::EGL) {
       if (XRE_IsParentProcess()) {
@@ -109,17 +109,17 @@ UniquePtr<SurfaceFactory> GLScreenBuffer
     // are both WARP or both not WARP
     gfx::DeviceManagerDx* dm = gfx::DeviceManagerDx::Get();
     if (gl->IsANGLE() && (gl->IsWARP() == dm->IsWARP()) &&
         dm->TextureSharingWorks()) {
       factory =
           SurfaceFactory_ANGLEShareHandle::Create(gl, caps, ipcChannel, flags);
     }
 
-    if (!factory && StaticPrefs::WebGLDXGLEnabled()) {
+    if (!factory && StaticPrefs::webgl_dxgl_enabled()) {
       factory =
           SurfaceFactory_D3D11Interop::Create(gl, caps, ipcChannel, flags);
     }
 #endif
   }
 
 #ifdef MOZ_X11
   if (!factory && sGLXLibrary.UseTextureFromPixmap()) {
--- a/gfx/gl/SharedSurfaceD3D11Interop.cpp
+++ b/gfx/gl/SharedSurfaceD3D11Interop.cpp
@@ -409,17 +409,17 @@ SharedSurface_D3D11Interop::SharedSurfac
           gl, size, hasAlpha, true),
       mProdTex(prodTex),
       mInteropFB(interopFB),
       mInteropRB(interopRB),
       mInterop(interop),
       mLockHandle(lockHandle),
       mTexD3D(texD3D),
       mDXGIHandle(dxgiHandle),
-      mNeedsFinish(StaticPrefs::WebGLDXGLNeedsFinish()),
+      mNeedsFinish(StaticPrefs::webgl_dxgl_needs_finish()),
       mLockedForGL(false) {
   MOZ_ASSERT(bool(mProdTex) == bool(mInteropFB));
 }
 
 SharedSurface_D3D11Interop::~SharedSurface_D3D11Interop() {
   MOZ_ASSERT(!IsProducerAcquired());
 
   if (!mGL || !mGL->MakeCurrent()) return;
--- a/gfx/ipc/GPUProcessManager.cpp
+++ b/gfx/ipc/GPUProcessManager.cpp
@@ -931,17 +931,17 @@ bool GPUProcessManager::CreateContentVRM
 
   *aOutEndpoint = std::move(childPipe);
   return true;
 }
 
 void GPUProcessManager::CreateContentRemoteDecoderManager(
     base::ProcessId aOtherProcess,
     ipc::Endpoint<PRemoteDecoderManagerChild>* aOutEndpoint) {
-  if (!EnsureGPUReady() || !StaticPrefs::MediaGpuProcessDecoder() ||
+  if (!EnsureGPUReady() || !StaticPrefs::media_gpu_process_decoder() ||
       !mDecodeVideoOnGpuProcess) {
     return;
   }
 
   ipc::Endpoint<PRemoteDecoderManagerParent> parentPipe;
   ipc::Endpoint<PRemoteDecoderManagerChild> childPipe;
 
   nsresult rv = PRemoteDecoderManager::CreateEndpoints(
--- a/gfx/layers/D3D11ShareHandleImage.cpp
+++ b/gfx/layers/D3D11ShareHandleImage.cpp
@@ -218,21 +218,21 @@ class MOZ_RAII D3D11TextureClientAllocat
   const RefPtr<ID3D11Device> mDevice;
 };
 
 D3D11RecycleAllocator::D3D11RecycleAllocator(
     KnowsCompositor* aAllocator, ID3D11Device* aDevice,
     gfx::SurfaceFormat aPreferredFormat)
     : TextureClientRecycleAllocator(aAllocator),
       mDevice(aDevice),
-      mCanUseNV12(StaticPrefs::PDMWMFUseNV12Format() &&
+      mCanUseNV12(StaticPrefs::media_wmf_use_nv12_format() &&
                   gfx::DeviceManagerDx::Get()->CanUseNV12()),
-      mCanUseP010(StaticPrefs::PDMWMFUseNV12Format() &&
+      mCanUseP010(StaticPrefs::media_wmf_use_nv12_format() &&
                   gfx::DeviceManagerDx::Get()->CanUseP010()),
-      mCanUseP016(StaticPrefs::PDMWMFUseNV12Format() &&
+      mCanUseP016(StaticPrefs::media_wmf_use_nv12_format() &&
                   gfx::DeviceManagerDx::Get()->CanUseP016()) {
   SetPreferredSurfaceFormat(aPreferredFormat);
 }
 
 void D3D11RecycleAllocator::SetPreferredSurfaceFormat(
     gfx::SurfaceFormat aPreferredFormat) {
   if ((aPreferredFormat == gfx::SurfaceFormat::NV12 && mCanUseNV12) ||
       (aPreferredFormat == gfx::SurfaceFormat::P010 && mCanUseP010) ||
@@ -251,17 +251,17 @@ already_AddRefed<TextureClient> D3D11Rec
   // we could not reuse old D3D11Textures. It could cause video flickering.
   RefPtr<ID3D11Device> device = gfx::DeviceManagerDx::Get()->GetImageDevice();
   if (!!mImageDevice && mImageDevice != device) {
     ShrinkToMinimumSize();
   }
   mImageDevice = device;
 
   TextureAllocationFlags allocFlags = TextureAllocationFlags::ALLOC_DEFAULT;
-  if (StaticPrefs::PDMWMFUseSyncTexture() ||
+  if (StaticPrefs::media_wmf_use_sync_texture() ||
       mDevice == DeviceManagerDx::Get()->GetCompositorDevice()) {
     // If our device is the compositor device, we don't need any synchronization
     // in practice.
     allocFlags = TextureAllocationFlags::ALLOC_MANUAL_SYNCHRONIZATION;
   }
 
   D3D11TextureClientAllocationHelper helper(mUsableSurfaceFormat, aColorSpace,
                                             aSize, allocFlags, mDevice,
--- a/gfx/src/DriverCrashGuard.cpp
+++ b/gfx/src/DriverCrashGuard.cpp
@@ -504,21 +504,21 @@ bool GLContextCrashGuard::UpdateEnvironm
   }
 
   checked = true;
 
   bool changed = false;
 
 #if defined(XP_WIN)
   changed |= CheckAndUpdateBoolPref("gfx.driver-init.webgl-angle-force-d3d11",
-                                    StaticPrefs::WebGLANGLEForceD3D11());
+                                    StaticPrefs::webgl_angle_force_d3d11());
   changed |= CheckAndUpdateBoolPref("gfx.driver-init.webgl-angle-try-d3d11",
-                                    StaticPrefs::WebGLANGLETryD3D11());
+                                    StaticPrefs::webgl_angle_try_d3d11());
   changed |= CheckAndUpdateBoolPref("gfx.driver-init.webgl-angle-force-warp",
-                                    StaticPrefs::WebGLANGLEForceWARP());
+                                    StaticPrefs::webgl_angle_force_warp());
   changed |= CheckAndUpdateBoolPref(
       "gfx.driver-init.webgl-angle",
       FeatureEnabled(nsIGfxInfo::FEATURE_WEBGL_ANGLE, false));
   changed |= CheckAndUpdateBoolPref(
       "gfx.driver-init.direct3d11-angle",
       FeatureEnabled(nsIGfxInfo::FEATURE_DIRECT3D_11_ANGLE, false));
 #endif
 
--- a/gfx/thebes/D3D11Checks.cpp
+++ b/gfx/thebes/D3D11Checks.cpp
@@ -444,27 +444,27 @@ bool D3D11Checks::DoesRemotePresentWork(
     if (gfxInfo) {
       gfxInfo->GetAdapterDriverVersion(version);
     }
     return DXVA2Manager::IsNV12Supported(desc.VendorId, desc.DeviceId, version);
   };
 
   auto doesP010Work = [&]() {
     if (gfxVars::DXP010Blocked() &&
-        !StaticPrefs::PDMWMFForceAllowP010Format()) {
+        !StaticPrefs::media_wmf_force_allow_p010_format()) {
       return false;
     }
     UINT formatSupport;
     HRESULT hr = device->CheckFormatSupport(DXGI_FORMAT_P010, &formatSupport);
     return (SUCCEEDED(hr) && (formatSupport & D3D11_FORMAT_SUPPORT_TEXTURE2D));
   };
 
   auto doesP016Work = [&]() {
     if (gfxVars::DXP016Blocked() &&
-        !StaticPrefs::PDMWMFForceAllowP010Format()) {
+        !StaticPrefs::media_wmf_force_allow_p010_format()) {
       return false;
     }
     UINT formatSupport;
     HRESULT hr = device->CheckFormatSupport(DXGI_FORMAT_P016, &formatSupport);
     return (SUCCEEDED(hr) && (formatSupport & D3D11_FORMAT_SUPPORT_TEXTURE2D));
   };
 
   VideoFormatOptionSet options;
--- a/gfx/thebes/gfxPlatform.cpp
+++ b/gfx/thebes/gfxPlatform.cpp
@@ -917,21 +917,22 @@ void gfxPlatform::Init() {
     // Layers prefs
     forcedPrefs.AppendPrintf(
         "-L%d%d%d%d", StaticPrefs::layers_amd_switchable_gfx_enabled(),
         StaticPrefs::layers_acceleration_disabled_do_not_use_directly(),
         StaticPrefs::layers_acceleration_force_enabled_do_not_use_directly(),
         StaticPrefs::layers_d3d11_force_warp());
     // WebGL prefs
     forcedPrefs.AppendPrintf(
-        "-W%d%d%d%d%d%d%d%d", StaticPrefs::WebGLANGLEForceD3D11(),
-        StaticPrefs::WebGLANGLEForceWARP(), StaticPrefs::WebGLDisabled(),
-        StaticPrefs::WebGLDisableANGLE(), StaticPrefs::WebGLDXGLEnabled(),
-        StaticPrefs::WebGLForceEnabled(),
-        StaticPrefs::WebGLForceLayersReadback(), StaticPrefs::WebGLForceMSAA());
+        "-W%d%d%d%d%d%d%d%d", StaticPrefs::webgl_angle_force_d3d11(),
+        StaticPrefs::webgl_angle_force_warp(), StaticPrefs::webgl_disabled(),
+        StaticPrefs::webgl_disable_angle(), StaticPrefs::webgl_dxgl_enabled(),
+        StaticPrefs::webgl_force_enabled(),
+        StaticPrefs::webgl_force_layers_readback(),
+        StaticPrefs::webgl_msaa_force());
     // Prefs that don't fit into any of the other sections
     forcedPrefs.AppendPrintf("-T%d%d%d) ",
                              StaticPrefs::gfx_android_rgb16_force(),
                              0,  // SkiaGL canvas no longer supported
                              StaticPrefs::layers_force_shmem_tiles());
     ScopedGfxFeatureReporter::AppNote(forcedPrefs);
   }
 
@@ -2422,17 +2423,17 @@ void gfxPlatform::InitAcceleration() {
   if (Preferences::GetBool("media.hardware-video-decoding.enabled", false) &&
 #ifdef XP_WIN
       Preferences::GetBool("media.wmf.dxva.enabled", true) &&
 #endif
       NS_SUCCEEDED(
           gfxInfo->GetFeatureStatus(nsIGfxInfo::FEATURE_HARDWARE_VIDEO_DECODING,
                                     discardFailureId, &status))) {
     if (status == nsIGfxInfo::FEATURE_STATUS_OK ||
-        StaticPrefs::HardwareVideoDecodingForceEnabled()) {
+        StaticPrefs::media_hardware_video_decoding_force_enabled()) {
       sLayersSupportsHardwareVideoDecoding = true;
     }
   }
 
   sLayersAccelerationPrefsInitialized = true;
 
   if (XRE_IsParentProcess()) {
     Preferences::RegisterCallbackAndCall(
--- a/media/webrtc/signaling/src/media-conduit/MediaCodecVideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/MediaCodecVideoCodec.cpp
@@ -16,17 +16,17 @@ static const char* mcvcLogTag = "MediaCo
 #  undef LOGTAG
 #endif
 #define LOGTAG mcvcLogTag
 
 WebrtcVideoEncoder* MediaCodecVideoCodec::CreateEncoder(CodecType aCodecType) {
   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
   if (aCodecType == CODEC_VP8) {
     if (StaticPrefs::
-            MediaNavigatorHardwareVp8encodeAccelerationRemoteEnabled()) {
+            media_navigator_hardware_vp8_encode_acceleration_remote_enabled()) {
       return new WebrtcMediaCodecVP8VideoRemoteEncoder();
     } else {
       return new WebrtcMediaCodecVP8VideoEncoder();
     }
   }
   return nullptr;
 }
 
--- a/media/webrtc/signaling/src/media-conduit/MediaDataDecoderCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/MediaDataDecoderCodec.cpp
@@ -15,22 +15,22 @@ WebrtcVideoEncoder* MediaDataDecoderCode
 }
 
 /* static */
 WebrtcVideoDecoder* MediaDataDecoderCodec::CreateDecoder(
     webrtc::VideoCodecType aCodecType) {
   switch (aCodecType) {
     case webrtc::VideoCodecType::kVideoCodecVP8:
     case webrtc::VideoCodecType::kVideoCodecVP9:
-      if (!StaticPrefs::MediaNavigatorMediadatadecoderVPXEnabled()) {
+      if (!StaticPrefs::media_navigator_mediadatadecoder_vpx_enabled()) {
         return nullptr;
       }
       break;
     case webrtc::VideoCodecType::kVideoCodecH264:
-      if (!StaticPrefs::MediaNavigatorMediadatadecoderH264Enabled()) {
+      if (!StaticPrefs::media_navigator_mediadatadecoder_h264_enabled()) {
         return nullptr;
       }
       break;
     default:
       return nullptr;
   }
   return new WebrtcMediaDataDecoder();
 }
--- a/modules/libpref/init/StaticPrefList.h
+++ b/modules/libpref/init/StaticPrefList.h
@@ -5469,137 +5469,123 @@ VARCACHE_PREF(
 //---------------------------------------------------------------------------
 
 // These prefs use camel case instead of snake case for the getter because one
 // reviewer had an unshakeable preference for that. Who could that be?
 
 VARCACHE_PREF(
   Live,
   "media.autoplay.enabled.user-gestures-needed",
-  MediaAutoplayUserGesturesNeeded,
-  bool, false
-)
-
-VARCACHE_PREF(
-  Live,
-  "media.autoplay.allow-muted",
-  MediaAutoplayAllowMuted,
-  RelaxedAtomicBool, true
-)
-
-VARCACHE_PREF(
-  Live,
-  "media.autoplay.blackList-override-default",
-  MediaAutoplayBlackListOverrideDefault,
-  RelaxedAtomicBool, true
+   media_autoplay_enabled_user_gestures_needed,
+  bool, false
 )
 
 // File-backed MediaCache size.
 VARCACHE_PREF(
   Live,
   "media.cache_size",
-  MediaCacheSize,
+   media_cache_size,
   RelaxedAtomicUint32, 512000 // Measured in KiB
 )
 
 // Size of file backed MediaCache while on a connection which is cellular (3G,
 // etc), and thus assumed to be "expensive".
 VARCACHE_PREF(
   Live,
   "media.cache_size.cellular",
-  MediaCacheCellularSize,
+   media_cache_size_cellular,
   RelaxedAtomicUint32, 32768 // Measured in KiB
 )
 
 // If a resource is known to be smaller than this size (in kilobytes), a
 // memory-backed MediaCache may be used; otherwise the (single shared global)
 // file-backed MediaCache is used.
 VARCACHE_PREF(
   Live,
   "media.memory_cache_max_size",
-  MediaMemoryCacheMaxSize,
+   media_memory_cache_max_size,
   uint32_t, 8192      // Measured in KiB
 )
 
 // Don't create more memory-backed MediaCaches if their combined size would go
 // above this absolute size limit.
 VARCACHE_PREF(
   Live,
   "media.memory_caches_combined_limit_kb",
-  MediaMemoryCachesCombinedLimitKb,
+   media_memory_caches_combined_limit_kb,
   uint32_t, 524288
 )
 
 // Don't create more memory-backed MediaCaches if their combined size would go
 // above this relative size limit (a percentage of physical memory).
 VARCACHE_PREF(
   Live,
   "media.memory_caches_combined_limit_pc_sysmem",
-  MediaMemoryCachesCombinedLimitPcSysmem,
+   media_memory_caches_combined_limit_pc_sysmem,
   uint32_t, 5         // A percentage
 )
 
 // When a network connection is suspended, don't resume it until the amount of
 // buffered data falls below this threshold (in seconds).
 VARCACHE_PREF(
   Live,
   "media.cache_resume_threshold",
-  MediaCacheResumeThreshold,
+   media_cache_resume_threshold,
   RelaxedAtomicUint32, 30
 )
 VARCACHE_PREF(
   Live,
   "media.cache_resume_threshold.cellular",
-  MediaCacheCellularResumeThreshold,
+   media_cache_resume_threshold_cellular,
   RelaxedAtomicUint32, 10
 )
 
 // Stop reading ahead when our buffered data is this many seconds ahead of the
 // current playback position. This limit can stop us from using arbitrary
 // amounts of network bandwidth prefetching huge videos.
 VARCACHE_PREF(
   Live,
   "media.cache_readahead_limit",
-  MediaCacheReadaheadLimit,
+   media_cache_readahead_limit,
   RelaxedAtomicUint32, 60
 )
 VARCACHE_PREF(
   Live,
   "media.cache_readahead_limit.cellular",
-  MediaCacheCellularReadaheadLimit,
+   media_cache_readahead_limit_cellular,
   RelaxedAtomicUint32, 30
 )
 
 // AudioSink
 VARCACHE_PREF(
   Live,
   "media.resampling.enabled",
-  MediaResamplingEnabled,
+   media_resampling_enabled,
   RelaxedAtomicBool, false
 )
 
 #if defined(XP_WIN) || defined(XP_DARWIN) || defined(MOZ_PULSEAUDIO)
 // libcubeb backend implement .get_preferred_channel_layout
 # define PREF_VALUE false
 #else
 # define PREF_VALUE true
 #endif
 VARCACHE_PREF(
   Live,
   "media.forcestereo.enabled",
-  MediaForcestereoEnabled,
+   media_forcestereo_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 // VideoSink
 VARCACHE_PREF(
   Live,
   "media.ruin-av-sync.enabled",
-  MediaRuinAvSyncEnabled,
+   media_ruin_av_sync_enabled,
   RelaxedAtomicBool, false
 )
 
 // Encrypted Media Extensions
 #if defined(ANDROID)
 # if defined(NIGHTLY_BUILD)
 #  define PREF_VALUE true
 # else
@@ -5615,17 +5601,17 @@ VARCACHE_PREF(
   // granted.
 # define PREF_VALUE false
 #else
 # define PREF_VALUE true
 #endif
 VARCACHE_PREF(
   Live,
   "media.eme.enabled",
-  MediaEmeEnabled,
+   media_eme_enabled,
   bool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 // Whether we expose the functionality proposed in
 // https://github.com/WICG/encrypted-media-encryption-scheme/blob/master/explainer.md
 // I.e. if true, apps calling navigator.requestMediaKeySystemAccess() can pass
 // an optional encryption scheme as part of MediaKeySystemMediaCapability
@@ -5636,51 +5622,51 @@ VARCACHE_PREF(
   "media.eme.encrypted-media-encryption-scheme.enabled",
   media_eme_encrypted_media_encryption_scheme_enabled,
   bool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.clearkey.persistent-license.enabled",
-  MediaClearkeyPersistentLicenseEnabled,
+   media_clearkey_persistent_license_enabled,
   bool, false
 )
 
 #if defined(XP_LINUX) && defined(MOZ_SANDBOX)
 // Whether to allow, on a Linux system that doesn't support the necessary
 // sandboxing features, loading Gecko Media Plugins unsandboxed.  However, EME
 // CDMs will not be loaded without sandboxing even if this pref is changed.
 VARCACHE_PREF(
   Live,
   "media.gmp.insecure.allow",
-  MediaGmpInsecureAllow,
+   media_gmp_insecure_allow,
   RelaxedAtomicBool, false
 )
 #endif
 
 // Specifies whether the PDMFactory can create a test decoder that just outputs
 // blank frames/audio instead of actually decoding. The blank decoder works on
 // all platforms.
 VARCACHE_PREF(
   Live,
   "media.use-blank-decoder",
-  MediaUseBlankDecoder,
+   media_use_blank_decoder,
   RelaxedAtomicBool, false
 )
 
 #if defined(XP_WIN)
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.gpu-process-decoder",
-  MediaGpuProcessDecoder,
+   media_gpu_process_decoder,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 #if defined(XP_WIN)
 # if defined(_ARM64_) || defined(__MINGW32__)
 #  define PREF_VALUE false
 # else
@@ -5691,294 +5677,294 @@ VARCACHE_PREF(
 #elif defined(XP_LINUX) && !defined(ANDROID)
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.rdd-process.enabled",
-  MediaRddProcessEnabled,
+   media_rdd_process_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 VARCACHE_PREF(
   Live,
   "media.rdd-process.startup_timeout_ms",
-  MediaRddProcessStartupTimeoutMs,
+   media_rdd_process_startup_timeout_ms,
   RelaxedAtomicInt32, 5000
 )
 
 #if defined(XP_LINUX) && !defined(ANDROID)
 # define PREF_VALUE true
 #elif defined(XP_WIN) && !defined(_ARM64_)
 # define PREF_VALUE false
 #elif defined(XP_MACOSX)
 # define PREF_VALUE false
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.rdd-vorbis.enabled",
-  MediaRddVorbisEnabled,
+   media_rdd_vorbis_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 #ifdef ANDROID
 
 // Enable the MediaCodec PlatformDecoderModule by default.
 VARCACHE_PREF(
   Live,
   "media.android-media-codec.enabled",
-  MediaAndroidMediaCodecEnabled,
+   media_android_media_codec_enabled,
   RelaxedAtomicBool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.android-media-codec.preferred",
-  MediaAndroidMediaCodecPreferred,
+   media_android_media_codec_preferred,
   RelaxedAtomicBool, true
 )
 
 #endif // ANDROID
 
 #ifdef MOZ_OMX
 VARCACHE_PREF(
   Live,
   "media.omx.enabled",
-  MediaOmxEnabled,
+   media_omx_enabled,
   bool, false
 )
 #endif
 
 #ifdef MOZ_FFMPEG
 
 # if defined(XP_MACOSX)
 #  define PREF_VALUE false
 # else
 #  define PREF_VALUE true
 # endif
 VARCACHE_PREF(
   Live,
   "media.ffmpeg.enabled",
-  MediaFfmpegEnabled,
+   media_ffmpeg_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 VARCACHE_PREF(
   Live,
   "media.libavcodec.allow-obsolete",
-  MediaLibavcodecAllowObsolete,
+   media_libavcodec_allow_obsolete,
   bool, false
 )
 
 #endif // MOZ_FFMPEG
 
 #ifdef MOZ_FFVPX
 VARCACHE_PREF(
   Live,
   "media.ffvpx.enabled",
-  MediaFfvpxEnabled,
+   media_ffvpx_enabled,
   RelaxedAtomicBool, true
 )
 #endif
 
 #if defined(MOZ_FFMPEG) || defined(MOZ_FFVPX)
 VARCACHE_PREF(
   Live,
   "media.ffmpeg.low-latency.enabled",
-  MediaFfmpegLowLatencyEnabled,
+   media_ffmpeg_low_latency_enabled,
   RelaxedAtomicBool, false
 )
 #endif
 
 #ifdef MOZ_WMF
 
 VARCACHE_PREF(
   Live,
   "media.wmf.enabled",
-  MediaWmfEnabled,
+   media_wmf_enabled,
   RelaxedAtomicBool, true
 )
 
 // Whether DD should consider WMF-disabled a WMF failure, useful for testing.
 VARCACHE_PREF(
   Live,
   "media.decoder-doctor.wmf-disabled-is-failure",
-  MediaDecoderDoctorWmfDisabledIsFailure,
+   media_decoder_doctor_wmf_disabled_is_failure,
   bool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.dxva.d3d11.enabled",
-  PDMWMFAllowD3D11,
+   media_wmf_dxva_d3d11_enabled,
   RelaxedAtomicBool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.dxva.max-videos",
-  PDMWMFMaxDXVAVideos,
+   media_wmf_dxva_max_videos,
   RelaxedAtomicUint32, 8
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.use-nv12-format",
-  PDMWMFUseNV12Format,
+   media_wmf_use_nv12_format,
   RelaxedAtomicBool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.force.allow-p010-format",
-  PDMWMFForceAllowP010Format,
+   media_wmf_force_allow_p010_format,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Once,
   "media.wmf.use-sync-texture",
-  PDMWMFUseSyncTexture,
+   media_wmf_use_sync_texture,
   bool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.low-latency.enabled",
-  PDMWMFLowLatencyEnabled,
+   media_wmf_low_latency_enabled,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.low-latency.force-disabled",
-  PDMWMFLowLatencyForceDisabled,
+   media_wmf_low_latency_force_disabled,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.skip-blacklist",
-  PDMWMFSkipBlacklist,
+   media_wmf_skip_blacklist,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.deblacklisting-for-telemetry-in-gpu-process",
-  PDMWMFDeblacklistingForTelemetryInGPUProcess,
+   media_wmf_deblacklisting_for_telemetry_in_gpu_process,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.amd.highres.enabled",
-  PDMWMFAMDHighResEnabled,
+   media_wmf_amd_highres_enabled,
   RelaxedAtomicBool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.wmf.allow-unsupported-resolutions",
-  PDMWMFAllowUnsupportedResolutions,
+   media_wmf_allow_unsupported_resolutions,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Once,
   "media.wmf.vp9.enabled",
-  MediaWmfVp9Enabled,
+   media_wmf_vp9_enabled,
   bool, true
 )
 #endif // MOZ_WMF
 
 VARCACHE_PREF(
   Once,
   "media.hardware-video-decoding.force-enabled",
-  HardwareVideoDecodingForceEnabled,
+   media_hardware_video_decoding_force_enabled,
   bool, false
 )
 
 // Whether to check the decoder supports recycling.
 #ifdef ANDROID
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.decoder.recycle.enabled",
-  MediaDecoderRecycleEnabled,
+   media_decoder_recycle_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 // Should MFR try to skip to the next key frame?
 VARCACHE_PREF(
   Live,
   "media.decoder.skip-to-next-key-frame.enabled",
-  MediaDecoderSkipToNextKeyFrameEnabled,
+   media_decoder_skip_to_next_key_frame_enabled,
   RelaxedAtomicBool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.gmp.decoder.enabled",
-  MediaGmpDecoderEnabled,
+   media_gmp_decoder_enabled,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.eme.audio.blank",
-  MediaEmeAudioBlank,
+   media_eme_audio_blank,
   RelaxedAtomicBool, false
 )
 VARCACHE_PREF(
   Live,
   "media.eme.video.blank",
-  MediaEmeVideoBlank,
+   media_eme_video_blank,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.eme.chromium-api.video-shmems",
-  MediaEmeChromiumApiVideoShmems,
+   media_eme_chromium_api_video_shmems,
   RelaxedAtomicUint32, 6
 )
 
 // Whether to suspend decoding of videos in background tabs.
 VARCACHE_PREF(
   Live,
   "media.suspend-bkgnd-video.enabled",
-  MediaSuspendBkgndVideoEnabled,
+   media_suspend_bkgnd_video_enabled,
   RelaxedAtomicBool, true
 )
 
 // Delay, in ms, from time window goes to background to suspending
 // video decoders. Defaults to 10 seconds.
 VARCACHE_PREF(
   Live,
   "media.suspend-bkgnd-video.delay-ms",
-  MediaSuspendBkgndVideoDelayMs,
+   media_suspend_bkgnd_video_delay_ms,
   RelaxedAtomicUint32, 10000
 )
 
 VARCACHE_PREF(
   Live,
   "media.dormant-on-pause-timeout-ms",
-  MediaDormantOnPauseTimeoutMs,
+   media_dormant_on_pause_timeout_ms,
   RelaxedAtomicInt32, 5000
 )
 
 // AudioTrack and VideoTrack support
 VARCACHE_PREF(
   Live,
   "media.track.enabled",
   media_track_enabled,
@@ -5991,54 +5977,54 @@ VARCACHE_PREF(
   "media.webvtt.regions.enabled",
   media_webvtt_regions_enabled,
   bool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.webspeech.synth.force_global_queue",
-  MediaWebspeechSynthForceGlobalQueue,
+   media_webspeech_synth_force_global_queue,
   bool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.webspeech.test.enable",
-  MediaWebspeechTestEnable,
+   media_webspeech_test_enable,
   bool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.webspeech.test.fake_fsm_events",
-  MediaWebspeechTextFakeFsmEvents,
+   media_webspeech_test_fake_fsm_events,
   bool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.webspeech.test.fake_recognition_service",
-  MediaWebspeechTextFakeRecognitionService,
+   media_webspeech_test_fake_recognition_service,
   bool, false
 )
 
 #ifdef MOZ_WEBSPEECH
 VARCACHE_PREF(
   Live,
   "media.webspeech.recognition.enable",
   media_webspeech_recognition_enable,
   bool, false
 )
 #endif
 
 VARCACHE_PREF(
   Live,
   "media.webspeech.recognition.force_enable",
-  MediaWebspeechRecognitionForceEnable,
+   media_webspeech_recognition_force_enable,
   bool, false
 )
 
 #ifdef MOZ_WEBSPEECH
 VARCACHE_PREF(
   Live,
   "media.webspeech.synth.enabled",
   media_webspeech_synth_enabled,
@@ -6049,175 +6035,175 @@ VARCACHE_PREF(
 #if defined(MOZ_WEBM_ENCODER)
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.encoder.webm.enabled",
-  MediaEncoderWebMEnabled,
+   media_encoder_webm_enabled,
   RelaxedAtomicBool, true
 )
 #undef PREF_VALUE
 
 #if defined(RELEASE_OR_BETA)
 # define PREF_VALUE 3
 #else
   // Zero tolerance in pre-release builds to detect any decoder regression.
 # define PREF_VALUE 0
 #endif
 VARCACHE_PREF(
   Live,
   "media.audio-max-decode-error",
-  MediaAudioMaxDecodeError,
+   media_audio_max_decode_error,
   uint32_t, PREF_VALUE
 )
 #undef PREF_VALUE
 
 #if defined(RELEASE_OR_BETA)
 # define PREF_VALUE 2
 #else
   // Zero tolerance in pre-release builds to detect any decoder regression.
 # define PREF_VALUE 0
 #endif
 VARCACHE_PREF(
   Live,
   "media.video-max-decode-error",
-  MediaVideoMaxDecodeError,
+   media_video_max_decode_error,
   uint32_t, PREF_VALUE
 )
 #undef PREF_VALUE
 
 // Opus
 VARCACHE_PREF(
   Live,
   "media.opus.enabled",
-  MediaOpusEnabled,
+   media_opus_enabled,
   RelaxedAtomicBool, true
 )
 
 // Wave
 VARCACHE_PREF(
   Live,
   "media.wave.enabled",
-  MediaWaveEnabled,
+   media_wave_enabled,
   RelaxedAtomicBool, true
 )
 
 // Ogg
 VARCACHE_PREF(
   Live,
   "media.ogg.enabled",
-  MediaOggEnabled,
+   media_ogg_enabled,
   RelaxedAtomicBool, true
 )
 
 // WebM
 VARCACHE_PREF(
   Live,
   "media.webm.enabled",
-  MediaWebMEnabled,
+   media_webm_enabled,
   RelaxedAtomicBool, true
 )
 
 // AV1
 #if defined(XP_WIN) && !defined(_ARM64_)
 # define PREF_VALUE true
 #elif defined(XP_MACOSX)
 # define PREF_VALUE true
 #elif defined(XP_UNIX) && !defined(Android)
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.av1.enabled",
-  MediaAv1Enabled,
+   media_av1_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 VARCACHE_PREF(
   Live,
   "media.av1.use-dav1d",
-  MediaAv1UseDav1d,
+   media_av1_use_dav1d,
 #if defined(XP_WIN) && !defined(_ARM64_)
   RelaxedAtomicBool, true
 #elif defined(XP_MACOSX)
   RelaxedAtomicBool, true
 #elif defined(XP_UNIX) && !defined(Android)
   RelaxedAtomicBool, true
 #else
   RelaxedAtomicBool, false
 #endif
 )
 
 VARCACHE_PREF(
   Live,
   "media.flac.enabled",
-  MediaFlacEnabled,
+   media_flac_enabled,
   bool, true
 )
 
 // Hls
 #ifdef ANDROID
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.hls.enabled",
-  MediaHlsEnabled,
+   media_hls_enabled,
   bool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 // Max number of HLS players that can be created concurrently. Used only on
 // Android and when "media.hls.enabled" is true.
 #ifdef ANDROID
 VARCACHE_PREF(
   Live,
   "media.hls.max-allocations",
-  MediaHlsMaxAllocations,
+   media_hls_max_allocations,
   uint32_t, 20
 )
 #endif
 
 #ifdef MOZ_FMP4
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.mp4.enabled",
-  MediaMp4Enabled,
+   media_mp4_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 // Error/warning handling, Decoder Doctor.
 //
 // Set to true to force demux/decode warnings to be treated as errors.
 VARCACHE_PREF(
   Live,
   "media.playback.warnings-as-errors",
-  MediaPlaybackWarningsAsErrors,
+   media_playback_warnings_as_errors,
   RelaxedAtomicBool, false
 )
 
 // Resume video decoding when the cursor is hovering on a background tab to
 // reduce the resume latency and improve the user experience.
 VARCACHE_PREF(
   Live,
   "media.resume-bkgnd-video-on-tabhover",
-  MediaResumeBkgndVideoOnTabhover,
+   media_resume_bkgnd_video_on_tabhover,
   bool, true
 )
 
 #ifdef ANDROID
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
@@ -6228,73 +6214,73 @@ VARCACHE_PREF(
   bool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 // Media Seamless Looping
 VARCACHE_PREF(
   Live,
   "media.seamless-looping",
-  MediaSeamlessLooping,
+  media_seamless_looping,
   RelaxedAtomicBool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.autoplay.block-event.enabled",
-  MediaBlockEventEnabled,
+  media_autoplay_block_event_enabled,
   bool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.media-capabilities.enabled",
-  MediaCapabilitiesEnabled,
+  media_media_capabilities_enabled,
   RelaxedAtomicBool, true
 )
 
 VARCACHE_PREF(
   Live,
   "media.media-capabilities.screen.enabled",
-  MediaCapabilitiesScreenEnabled,
+  media_media_capabilities_screen_enabled,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "media.benchmark.vp9.fps",
-  MediaBenchmarkVp9Fps,
+  media_benchmark_vp9_fps,
   RelaxedAtomicUint32, 0
 )
 
 VARCACHE_PREF(
   Live,
   "media.benchmark.vp9.threshold",
-  MediaBenchmarkVp9Threshold,
+  media_benchmark_vp9_threshold,
   RelaxedAtomicUint32, 150
 )
 
 VARCACHE_PREF(
   Live,
   "media.benchmark.vp9.versioncheck",
-  MediaBenchmarkVp9Versioncheck,
+  media_benchmark_vp9_versioncheck,
   RelaxedAtomicUint32, 0
 )
 
 VARCACHE_PREF(
   Live,
   "media.benchmark.frames",
-  MediaBenchmarkFrames,
+  media_benchmark_frames,
   RelaxedAtomicUint32, 300
 )
 
 VARCACHE_PREF(
   Live,
   "media.benchmark.timeout",
-  MediaBenchmarkTimeout,
+  media_benchmark_timeout,
   RelaxedAtomicUint32, 1000
 )
 
 VARCACHE_PREF(
   Live,
   "media.test.video-suspend",
   media_test_video_suspend,
   RelaxedAtomicBool, false
@@ -6363,17 +6349,17 @@ VARCACHE_PREF(
 )
 
 #ifdef MOZ_WEBRTC
 #ifdef ANDROID
 
 VARCACHE_PREF(
   Live,
   "media.navigator.hardware.vp8_encode.acceleration_remote_enabled",
-  MediaNavigatorHardwareVp8encodeAccelerationRemoteEnabled,
+  media_navigator_hardware_vp8_encode_acceleration_remote_enabled,
   bool, true
 )
 
 PREF("media.navigator.hardware.vp8_encode.acceleration_enabled", bool, true)
 
 PREF("media.navigator.hardware.vp8_decode.acceleration_enabled", bool, false)
 
 #endif // ANDROID
@@ -6384,33 +6370,33 @@ PREF("media.navigator.hardware.vp8_decod
 #if defined(NIGHTLY_BUILD) && !defined(ANDROID)
 # define PREF_VALUE true
 #else
 # define PREF_VALUE false
 #endif
 VARCACHE_PREF(
   Live,
   "media.navigator.mediadatadecoder_vpx_enabled",
-  MediaNavigatorMediadatadecoderVPXEnabled,
+  media_navigator_mediadatadecoder_vpx_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 // Use MediaDataDecoder API for H264 in WebRTC. This includes hardware
 // acceleration for decoding.
 # if defined(ANDROID)
 #  define PREF_VALUE false // Bug 1509316
 # else
 #  define PREF_VALUE true
 # endif
 
 VARCACHE_PREF(
   Live,
   "media.navigator.mediadatadecoder_h264_enabled",
-  MediaNavigatorMediadatadecoderH264Enabled,
+  media_navigator_mediadatadecoder_h264_enabled,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 #endif // MOZ_WEBRTC
 
 // HTMLMediaElement.allowedToPlay should be exposed to web content when
 // block autoplay rides the trains to release. Until then, Nightly only.
@@ -7253,278 +7239,273 @@ VARCACHE_PREF(
 
 //---------------------------------------------------------------------------
 // Prefs starting with "webgl." (for pref access from Worker threads)
 //---------------------------------------------------------------------------
 
 VARCACHE_PREF(
   Live,
   "webgl.1.allow-core-profiles",
-  WebGL1AllowCoreProfile,
+  webgl_1_allow_core_profiles,
   RelaxedAtomicBool, false
 )
 
 
 VARCACHE_PREF(
   Live,
   "webgl.all-angle-options",
-  WebGLAllANGLEOptions,
+  webgl_all_angle_options,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.angle.force-d3d11",
-  WebGLANGLEForceD3D11,
+  webgl_angle_force_d3d11,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.angle.try-d3d11",
-  WebGLANGLETryD3D11,
+  webgl_angle_try_d3d11,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.angle.force-warp",
-  WebGLANGLEForceWARP,
+  webgl_angle_force_warp,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.can-lose-context-in-foreground",
-  WebGLCanLoseContextInForeground,
+  webgl_can_lose_context_in_foreground,
   RelaxedAtomicBool, true
 )
 
 #ifdef MOZ_WIDGET_ANDROID
 # define PREF_VALUE false
 #else
 # define PREF_VALUE true
 #endif
 VARCACHE_PREF(
   Live,
   "webgl.default-antialias",
-  WebGLDefaultAntialias,
+  webgl_default_antialias,
   RelaxedAtomicBool, PREF_VALUE
 )
 #undef PREF_VALUE
 
 VARCACHE_PREF(
   Live,
   "webgl.default-low-power",
-  WebGLDefaultLowPower,
+  webgl_default_low_power,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.default-no-alpha",
-  WebGLDefaultNoAlpha,
+  webgl_default_no_alpha,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.disable-angle",
-  WebGLDisableANGLE,
+  webgl_disable_angle,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.disable-wgl",
-  WebGLDisableWGL,
+  webgl_disable_wgl,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.disable-extensions",
-  WebGLDisableExtensions,
+  webgl_disable_extensions,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.dxgl.enabled",
-  WebGLDXGLEnabled,
+  webgl_dxgl_enabled,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.dxgl.needs-finish",
-  WebGLDXGLNeedsFinish,
+  webgl_dxgl_needs_finish,
   RelaxedAtomicBool, false
 )
 
 
 VARCACHE_PREF(
   Live,
   "webgl.disable-fail-if-major-performance-caveat",
-  WebGLDisableFailIfMajorPerformanceCaveat,
+  webgl_disable_fail_if_major_performance_caveat,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.disable-DOM-blit-uploads",
-  WebGLDisableDOMBlitUploads,
+  webgl_disable_DOM_blit_uploads,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.disabled",
-  WebGLDisabled,
+  webgl_disabled,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.enable-draft-extensions",
-  WebGLDraftExtensionsEnabled,
+  webgl_enable_draft_extensions,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.enable-privileged-extensions",
-  WebGLPrivilegedExtensionsEnabled,
+  webgl_enable_privileged_extensions,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.enable-surface-texture",
-  WebGLSurfaceTextureEnabled,
+  webgl_enable_surface_texture,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.enable-webgl2",
   webgl_enable_webgl2,
   RelaxedAtomicBool, true
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.force-enabled",
-  WebGLForceEnabled,
+  webgl_force_enabled,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.force-layers-readback",
-  WebGLForceLayersReadback,
+  webgl_force_layers_readback,
   bool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.force-index-validation",
-  WebGLForceIndexValidation,
+  webgl_force_index_validation,
   RelaxedAtomicInt32, 0
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.lose-context-on-memory-pressure",
-  WebGLLoseContextOnMemoryPressure,
+  webgl_lose_context_on_memory_pressure,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.max-contexts",
-  WebGLMaxContexts,
+  webgl_max_contexts,
   RelaxedAtomicUint32, 32
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.max-contexts-per-principal",
-  WebGLMaxContextsPerPrincipal,
+  webgl_max_contexts_per_principal,
   RelaxedAtomicUint32, 16
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.max-warnings-per-context",
-  WebGLMaxWarningsPerContext,
+  webgl_max_warnings_per_context,
   RelaxedAtomicUint32, 32
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.min_capability_mode",
-  WebGLMinCapabilityMode,
+  webgl_min_capability_mode,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.msaa-force",
-  WebGLForceMSAA,
+  webgl_msaa_force,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.msaa-samples",
-  WebGLMsaaSamples,
+  webgl_msaa_samples,
   RelaxedAtomicUint32, 4
 )
 
-VARCACHE_PREF(
-  Live,
-  "webgl.prefer-16bpp",
-  WebGLPrefer16bpp,
-  RelaxedAtomicBool, false
-)
+PREF("webgl.prefer-16bpp", bool, false)
 
 VARCACHE_PREF(
   Live,
   "webgl.allow-immediate-queries",
-  WebGLImmediateQueries,
+  webgl_allow_immediate_queries,
   RelaxedAtomicBool, false
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.allow-fb-invalidation",
-  WebGLFBInvalidation,
+  webgl_allow_fb_invalidation,
   RelaxedAtomicBool, false
 )
 
 
 VARCACHE_PREF(
   Live,
   "webgl.perf.max-warnings",
-  WebGLMaxPerfWarnings,
+  webgl_perf_max_warnings,
   RelaxedAtomicInt32, 0
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.perf.max-acceptable-fb-status-invals",
-  WebGLMaxAcceptableFBStatusInvals,
+  webgl_perf_max_acceptable_fb_status_invals,
   RelaxedAtomicInt32, 0
 )
 
 VARCACHE_PREF(
   Live,
   "webgl.perf.spew-frame-allocs",
-  WebGLSpewFrameAllocs,
+  webgl_perf_spew_frame_allocs,
   RelaxedAtomicBool, true
 )
 
 //---------------------------------------------------------------------------
 // Prefs starting with "widget."
 //---------------------------------------------------------------------------
 
 VARCACHE_PREF(