Bug 1512280 - Make LOG macros clang-format friendlier. r=padenot
☠☠ backed out by 4ef33177d5ba ☠ ☠
author: Jan-Ivar Bruaroey <jib@mozilla.com>
Wed, 12 Dec 2018 02:36:51 +0000
changeset 507313 9a68c5f1ca0d0dc38e70e448aa2043c1569d27c3
parent 507312 616721cad8938767508b3dc34e0a98f53c47d600
child 507314 9463a1a3d790eaba8d61112c8dac961349a3da55
push id: 10547
push user: ffxbld-merge
push date: Mon, 21 Jan 2019 13:03:58 +0000
treeherder: mozilla-beta@24ec1916bffe [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: padenot
bugs: 1512280
milestone: 66.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1512280 - Make LOG macros clang-format friendlier. r=padenot Differential Revision: https://phabricator.services.mozilla.com/D13859
dom/media/MediaManager.cpp
dom/media/MediaManager.h
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
dom/media/webrtc/MediaTrackConstraints.cpp
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -188,21 +188,18 @@ already_AddRefed<nsIAsyncShutdownClient>
 }  // namespace
 
 namespace mozilla {
 
 #ifdef LOG
 #undef LOG
 #endif
 
-LogModule* GetMediaManagerLog() {
-  static LazyLogModule sLog("MediaManager");
-  return sLog;
-}
-#define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
+LazyLogModule gMediaManagerLog("MediaManager");
+#define LOG(...) MOZ_LOG(gMediaManagerLog, LogLevel::Debug, (__VA_ARGS__))
 
 using dom::BasicTrackSource;
 using dom::ConstrainDOMStringParameters;
 using dom::File;
 using dom::GetUserMediaRequest;
 using dom::MediaSourceEnum;
 using dom::MediaStreamConstraints;
 using dom::MediaStreamError;
@@ -637,17 +634,17 @@ class GetUserMediaWindowListener {
         obs->NotifyObservers(req, "recording-device-stopped", nullptr);
       }
       return;
     }
 
     MOZ_ASSERT(windowListener == this,
                "There should only be one window listener per window ID");
 
-    LOG(("GUMWindowListener %p removing windowID %" PRIu64, this, mWindowID));
+    LOG("GUMWindowListener %p removing windowID %" PRIu64, this, mWindowID);
     mgr->RemoveWindowID(mWindowID);
   }
 
   bool Remove(SourceListener* aListener) {
     MOZ_ASSERT(NS_IsMainThread());
 
     if (!mInactiveListeners.RemoveElement(aListener) &&
         !mActiveListeners.RemoveElement(aListener)) {
@@ -656,17 +653,17 @@ class GetUserMediaWindowListener {
 
     MOZ_ASSERT(!mInactiveListeners.Contains(aListener),
                "A SourceListener should only be once in one of "
                "mInactiveListeners and mActiveListeners");
     MOZ_ASSERT(!mActiveListeners.Contains(aListener),
                "A SourceListener should only be once in one of "
                "mInactiveListeners and mActiveListeners");
 
-    LOG(("GUMWindowListener %p removing SourceListener %p.", this, aListener));
+    LOG("GUMWindowListener %p removing SourceListener %p.", this, aListener);
     aListener->Remove();
 
     if (MediaDevice* removedDevice = aListener->GetVideoDevice()) {
       bool revokeVideoPermission = true;
       nsString removedRawId;
       nsString removedSourceType;
       removedDevice->GetRawId(removedRawId);
       removedDevice->GetMediaSource(removedSourceType);
@@ -718,20 +715,19 @@ class GetUserMediaWindowListener {
             globalWindow ? globalWindow->AsInner() : nullptr;
         RefPtr<GetUserMediaRequest> req =
             new GetUserMediaRequest(window, removedRawId, removedSourceType);
         obs->NotifyObservers(req, "recording-device-stopped", nullptr);
       }
     }
 
     if (mInactiveListeners.Length() == 0 && mActiveListeners.Length() == 0) {
-      LOG(
-          ("GUMWindowListener %p Removed the last SourceListener. "
-           "Cleaning up.",
-           this));
+      LOG("GUMWindowListener %p Removed the last SourceListener. "
+          "Cleaning up.",
+          this);
       RemoveAll();
     }
 
     return true;
   }
 
   void StopSharing();
 
@@ -1148,17 +1144,17 @@ class GetUserMediaStreamRunnable : publi
             mTrack->RemoveListener(this);
 
             if (!mManager->IsWindowListenerStillActive(mWindowListener)) {
               return;
             }
 
             // This is safe since we're on main-thread, and the windowlist can
             // only be invalidated from the main-thread (see OnNavigation)
-            LOG(("Returning success for getUserMedia()"));
+            LOG("Returning success for getUserMedia()");
             mHolder.Resolve(RefPtr<DOMMediaStream>(mStream), __func__);
           });
       // DispatchToMainThreadAfterStreamStateUpdate will make the runnable run
       // in stable state. But since the runnable runs JS we need to make a
       // double dispatch.
       mGraph->DispatchToMainThreadAfterStreamStateUpdate(NS_NewRunnableFunction(
           "TracksCreatedListener::NotifyOutput Stable State Notifier",
           [graph = mGraph, r = std::move(r)]() mutable {
@@ -1187,17 +1183,17 @@ class GetUserMediaStreamRunnable : publi
     nsMainThreadPtrHandle<MediaStreamTrack> mTrack;
     // Graph thread only.
     bool mDispatchedTracksCreated = false;
   };
 
   NS_IMETHOD
   Run() override {
     MOZ_ASSERT(NS_IsMainThread());
-    LOG(("GetUserMediaStreamRunnable::Run()"));
+    LOG("GetUserMediaStreamRunnable::Run()");
     nsGlobalWindowInner* globalWindow =
         nsGlobalWindowInner::GetInnerWindowWithId(mWindowID);
     nsPIDOMWindowInner* window =
         globalWindow ? globalWindow->AsInner() : nullptr;
 
     // We're on main-thread, and the windowlist can only
     // be invalidated from the main-thread (see OnNavigation)
     if (!mManager->IsWindowListenerStillActive(mWindowListener)) {
@@ -1344,17 +1340,17 @@ class GetUserMediaStreamRunnable : publi
         RefPtr<MediaStreamTrack> track = domStream->CreateDOMTrack(
             kVideoTrack, MediaSegment::VIDEO, videoSource,
             GetInvariant(mConstraints.mVideo));
         domStream->AddTrackInternal(track);
       }
     }
 
     if (!domStream || !stream || sHasShutdown) {
-      LOG(("Returning error for getUserMedia() - no stream"));
+      LOG("Returning error for getUserMedia() - no stream");
 
       mHolder.Reject(MakeRefPtr<MediaMgrError>(
                          MediaMgrError::Name::AbortError,
                          sHasShutdown ? NS_LITERAL_STRING("In shutdown")
                                       : NS_LITERAL_STRING("No stream.")),
                      __func__);
       return NS_OK;
     }
@@ -1376,30 +1372,28 @@ class GetUserMediaStreamRunnable : publi
     // because that can take a while.
     // Pass ownership of domStream through the lambda to the nested chrome
     // notification lambda to ensure it's kept alive until that lambda runs or
     // is discarded.
     mSourceListener->InitializeAsync()->Then(
         GetMainThreadSerialEventTarget(), __func__,
         [manager = mManager, windowListener = mWindowListener, track,
          tracksCreatedListener]() {
-          LOG(
-              ("GetUserMediaStreamRunnable::Run: starting success callback "
-               "following InitializeAsync()"));
+          LOG("GetUserMediaStreamRunnable::Run: starting success callback "
+              "following InitializeAsync()");
           // Initiating and starting devices succeeded.
           track->AddListener(tracksCreatedListener);
           windowListener->ChromeAffectingStateChanged();
           manager->SendPendingGUMRequest();
         },
         [manager = mManager, windowID = mWindowID,
          tracksCreatedListener](RefPtr<MediaMgrError>&& aError) {
           MOZ_ASSERT(NS_IsMainThread());
-          LOG(
-              ("GetUserMediaStreamRunnable::Run: starting failure callback "
-               "following InitializeAsync()"));
+          LOG("GetUserMediaStreamRunnable::Run: starting failure callback "
+              "following InitializeAsync()");
           // Initiating and starting devices failed.
 
           // Only run if the window is still active for our window listener.
           if (!(manager->IsWindowStillActive(windowID))) {
             return;
           }
           // This is safe since we're on main-thread, and the windowlist can
           // only be invalidated from the main-thread (see OnNavigation)
@@ -1408,19 +1402,18 @@ class GetUserMediaStreamRunnable : publi
 
     if (!IsPincipalInfoPrivate(mPrincipalInfo)) {
       // Call GetPrincipalKey again, this time w/persist = true, to promote
       // deviceIds to persistent, in case they're not already. Fire'n'forget.
       media::GetPrincipalKey(mPrincipalInfo, true)
           ->Then(GetCurrentThreadSerialEventTarget(), __func__,
                  [](const PrincipalKeyPromise::ResolveOrRejectValue& aValue) {
                    if (aValue.IsReject()) {
-                     LOG(
-                         ("Failed get Principal key. Persisting of deviceIds "
-                          "will be broken"));
+                     LOG("Failed get Principal key. Persisting of deviceIds "
+                         "will be broken");
                    }
                  });
     }
     return NS_OK;
   }
 
  private:
   MozPromiseHolder<MediaManager::StreamPromise> mHolder;
@@ -1438,44 +1431,44 @@ class GetUserMediaStreamRunnable : publi
 // Source getter returning full list
 
 static void GetMediaDevices(MediaEngine* aEngine, uint64_t aWindowId,
                             MediaSourceEnum aSrcType,
                             MediaManager::MediaDeviceSet& aResult,
                             const char* aMediaDeviceName = nullptr) {
   MOZ_ASSERT(MediaManager::IsInMediaThread());
 
-  LOG(("%s: aEngine=%p, aWindowId=%" PRIu64 ", aSrcType=%" PRIu8
-       ", aMediaDeviceName=%s",
-       __func__, aEngine, aWindowId, static_cast<uint8_t>(aSrcType),
-       aMediaDeviceName ? aMediaDeviceName : "null"));
+  LOG("%s: aEngine=%p, aWindowId=%" PRIu64 ", aSrcType=%" PRIu8
+      ", aMediaDeviceName=%s",
+      __func__, aEngine, aWindowId, static_cast<uint8_t>(aSrcType),
+      aMediaDeviceName ? aMediaDeviceName : "null");
   nsTArray<RefPtr<MediaDevice>> devices;
   aEngine->EnumerateDevices(aWindowId, aSrcType, MediaSinkEnum::Other,
                             &devices);
 
   /*
    * We're allowing multiple tabs to access the same camera for parity
    * with Chrome.  See bug 811757 for some of the issues surrounding
    * this decision.  To disallow, we'd filter by IsAvailable() as we used
    * to.
    */
   if (aMediaDeviceName && *aMediaDeviceName) {
     for (auto& device : devices) {
       if (device->mName.EqualsASCII(aMediaDeviceName)) {
         aResult.AppendElement(device);
-        LOG(("%s: found aMediaDeviceName=%s", __func__, aMediaDeviceName));
+        LOG("%s: found aMediaDeviceName=%s", __func__, aMediaDeviceName);
         break;
       }
     }
   } else {
     aResult = devices;
-    if (MOZ_LOG_TEST(GetMediaManagerLog(), mozilla::LogLevel::Debug)) {
+    if (MOZ_LOG_TEST(gMediaManagerLog, mozilla::LogLevel::Debug)) {
       for (auto& device : devices) {
-        LOG(("%s: appending device=%s", __func__,
-             NS_ConvertUTF16toUTF8(device->mName).get()));
+        LOG("%s: appending device=%s", __func__,
+            NS_ConvertUTF16toUTF8(device->mName).get());
       }
     }
   }
 }
 
 RefPtr<MediaManager::BadConstraintsPromise> MediaManager::SelectSettings(
     const MediaStreamConstraints& aConstraints, bool aIsChrome,
     const RefPtr<MediaDeviceSetRefCnt>& aSources) {
@@ -1586,17 +1579,17 @@ class GetUserMediaTask : public Runnable
         "GetUserMediaWindowListener::Remove", mWindowListener,
         &GetUserMediaWindowListener::Remove, mSourceListener));
   }
 
   NS_IMETHOD
   Run() override {
     MOZ_ASSERT(!NS_IsMainThread());
     MOZ_ASSERT(mDeviceChosen);
-    LOG(("GetUserMediaTask::Run()"));
+    LOG("GetUserMediaTask::Run()");
 
     // Allocate a video or audio device and return a MediaStream via
     // a GetUserMediaStreamRunnable.
 
     nsresult rv;
     const char* errorMsg = nullptr;
     const char* badConstraint = nullptr;
 
@@ -1630,24 +1623,24 @@ class GetUserMediaTask : public Runnable
           mAudioDevice->Deallocate();
         }
       } else {
         if (!mIsChrome) {
           if (mShouldFocusSource) {
             rv = mVideoDevice->FocusOnSelectedSource();
 
             if (NS_FAILED(rv)) {
-              LOG(("FocusOnSelectedSource failed"));
+              LOG("FocusOnSelectedSource failed");
             }
           }
         }
       }
     }
     if (errorMsg) {
-      LOG(("%s %" PRIu32, errorMsg, static_cast<uint32_t>(rv)));
+      LOG("%s %" PRIu32, errorMsg, static_cast<uint32_t>(rv));
       if (badConstraint) {
         Fail(MediaMgrError::Name::OverconstrainedError, NS_LITERAL_STRING(""),
              NS_ConvertUTF8toUTF16(badConstraint));
       } else {
         Fail(MediaMgrError::Name::NotReadableError,
              NS_ConvertUTF8toUTF16(errorMsg));
       }
       NS_DispatchToMainThread(NS_NewRunnableFunction(
@@ -1779,23 +1772,23 @@ RefPtr<MediaManager::MediaDeviceSetPromi
              "If loopback video is requested video type should be camera!");
   MOZ_ASSERT(aAudioInputEnumType != DeviceEnumerationType::Fake ||
                  aAudioInputType == MediaSourceEnum::Microphone,
              "If fake mics are requested audio type should be microphone!");
   MOZ_ASSERT(aAudioInputEnumType != DeviceEnumerationType::Loopback ||
                  aAudioInputType == MediaSourceEnum::Microphone,
              "If loopback audio is requested audio type should be microphone!");
 
-  LOG(("%s: aWindowId=%" PRIu64 ", aVideoInputType=%" PRIu8
-       ", aAudioInputType=%" PRIu8 ", aVideoInputEnumType=%" PRIu8
-       ", aAudioInputEnumType=%" PRIu8,
-       __func__, aWindowId, static_cast<uint8_t>(aVideoInputType),
-       static_cast<uint8_t>(aAudioInputType),
-       static_cast<uint8_t>(aVideoInputEnumType),
-       static_cast<uint8_t>(aAudioInputEnumType)));
+  LOG("%s: aWindowId=%" PRIu64 ", aVideoInputType=%" PRIu8
+      ", aAudioInputType=%" PRIu8 ", aVideoInputEnumType=%" PRIu8
+      ", aAudioInputEnumType=%" PRIu8,
+      __func__, aWindowId, static_cast<uint8_t>(aVideoInputType),
+      static_cast<uint8_t>(aAudioInputType),
+      static_cast<uint8_t>(aVideoInputEnumType),
+      static_cast<uint8_t>(aAudioInputEnumType));
 
   auto holder = MakeUnique<MozPromiseHolder<MediaDeviceSetPromise>>();
   RefPtr<MediaDeviceSetPromise> promise = holder->Ensure(__func__);
 
   bool hasVideo = aVideoInputType != MediaSourceEnum::Other;
   bool hasAudio = aAudioInputType != MediaSourceEnum::Other;
   bool hasAudioOutput = aAudioOutputType == MediaSinkEnum::Speaker;
 
@@ -1835,30 +1828,28 @@ RefPtr<MediaManager::MediaDeviceSetPromi
       MOZ_RELEASE_ASSERT(manager);  // Must exist while media thread is alive
       realBackend = manager->GetBackend(aWindowId);
     }
 
     auto result = MakeRefPtr<MediaDeviceSetRefCnt>();
 
     if (hasVideo) {
       MediaDeviceSet videos;
-      LOG(("EnumerateRawDevices Task: Getting video sources with %s backend",
-           aVideoInputEnumType == DeviceEnumerationType::Fake ? "fake"
-                                                              : "real"));
+      LOG("EnumerateRawDevices Task: Getting video sources with %s backend",
+          aVideoInputEnumType == DeviceEnumerationType::Fake ? "fake" : "real");
       GetMediaDevices(aVideoInputEnumType == DeviceEnumerationType::Fake
                           ? fakeBackend
                           : realBackend,
                       aWindowId, aVideoInputType, videos, videoLoopDev.get());
       result->AppendElements(videos);
     }
     if (hasAudio) {
       MediaDeviceSet audios;
-      LOG(("EnumerateRawDevices Task: Getting audio sources with %s backend",
-           aAudioInputEnumType == DeviceEnumerationType::Fake ? "fake"
-                                                              : "real"));
+      LOG("EnumerateRawDevices Task: Getting audio sources with %s backend",
+          aAudioInputEnumType == DeviceEnumerationType::Fake ? "fake" : "real");
       GetMediaDevices(aAudioInputEnumType == DeviceEnumerationType::Fake
                           ? fakeBackend
                           : realBackend,
                       aWindowId, aAudioInputType, audios, audioLoopDev.get());
       result->AppendElements(audios);
     }
     if (hasAudioOutput) {
       MediaDeviceSet outputs;
@@ -1920,26 +1911,25 @@ MediaManager::MediaManager() : mMediaThr
   nsCOMPtr<nsIPrefService> prefs =
       do_GetService("@mozilla.org/preferences-service;1", &rv);
   if (NS_SUCCEEDED(rv)) {
     nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
     if (branch) {
       GetPrefs(branch, nullptr);
     }
   }
-  LOG(
-      ("%s: default prefs: %dx%d @%dfps, %dHz test tones, aec: %s,"
-       "agc: %s, noise: %s, aec level: %d, agc level: %d, noise level: %d,"
-       "%sfull_duplex, extended aec %s, delay_agnostic %s "
-       "channels %d",
-       __FUNCTION__, mPrefs.mWidth, mPrefs.mHeight, mPrefs.mFPS, mPrefs.mFreq,
-       mPrefs.mAecOn ? "on" : "off", mPrefs.mAgcOn ? "on" : "off",
-       mPrefs.mNoiseOn ? "on" : "off", mPrefs.mAec, mPrefs.mAgc, mPrefs.mNoise,
-       mPrefs.mFullDuplex ? "" : "not ", mPrefs.mExtendedFilter ? "on" : "off",
-       mPrefs.mDelayAgnostic ? "on" : "off", mPrefs.mChannels));
+  LOG("%s: default prefs: %dx%d @%dfps, %dHz test tones, aec: %s,"
+      "agc: %s, noise: %s, aec level: %d, agc level: %d, noise level: %d,"
+      "%sfull_duplex, extended aec %s, delay_agnostic %s "
+      "channels %d",
+      __FUNCTION__, mPrefs.mWidth, mPrefs.mHeight, mPrefs.mFPS, mPrefs.mFreq,
+      mPrefs.mAecOn ? "on" : "off", mPrefs.mAgcOn ? "on" : "off",
+      mPrefs.mNoiseOn ? "on" : "off", mPrefs.mAec, mPrefs.mAgc, mPrefs.mNoise,
+      mPrefs.mFullDuplex ? "" : "not ", mPrefs.mExtendedFilter ? "on" : "off",
+      mPrefs.mDelayAgnostic ? "on" : "off", mPrefs.mChannels);
 }
 
 NS_IMPL_ISUPPORTS(MediaManager, nsIMediaManagerService, nsIObserver)
 
 /* static */ StaticRefPtr<MediaManager> MediaManager::sSingleton;
 
 #ifdef DEBUG
 /* static */ bool MediaManager::IsInMediaThread() {
@@ -1996,17 +1986,17 @@ class MTAThread : public base::Thread {
     options.message_loop_type = MessageLoop::TYPE_MOZILLA_NONMAINUITHREAD;
 #else
     options.message_loop_type = MessageLoop::TYPE_MOZILLA_NONMAINTHREAD;
 #endif
     if (!sSingleton->mMediaThread->StartWithOptions(options)) {
       MOZ_CRASH();
     }
 
-    LOG(("New Media thread for gum"));
+    LOG("New Media thread for gum");
 
     nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
     if (obs) {
       obs->AddObserver(sSingleton, "last-pb-context-exited", false);
       obs->AddObserver(sSingleton, "getUserMedia:got-device-permission", false);
       obs->AddObserver(sSingleton, "getUserMedia:privileged:allow", false);
       obs->AddObserver(sSingleton, "getUserMedia:response:allow", false);
       obs->AddObserver(sSingleton, "getUserMedia:response:deny", false);
@@ -2689,100 +2679,96 @@ RefPtr<MediaManager::StreamPromise> Medi
       (videoEnumerationType != DeviceEnumerationType::Fake && hasVideo) ||
       (audioEnumerationType != DeviceEnumerationType::Fake && hasAudio);
   bool askPermission =
       (!privileged ||
        Preferences::GetBool("media.navigator.permission.force")) &&
       (realDevicesRequested ||
        Preferences::GetBool("media.navigator.permission.fake"));
 
-  LOG(("%s: Preparing to enumerate devices. windowId=%" PRIu64
-       ", videoType=%" PRIu8 ", audioType=%" PRIu8
-       ", videoEnumerationType=%" PRIu8 ", audioEnumerationType=%" PRIu8
-       ", askPermission=%s",
-       __func__, windowID, static_cast<uint8_t>(videoType),
-       static_cast<uint8_t>(audioType),
-       static_cast<uint8_t>(videoEnumerationType),
-       static_cast<uint8_t>(audioEnumerationType),
-       askPermission ? "true" : "false"));
+  LOG("%s: Preparing to enumerate devices. windowId=%" PRIu64
+      ", videoType=%" PRIu8 ", audioType=%" PRIu8
+      ", videoEnumerationType=%" PRIu8 ", audioEnumerationType=%" PRIu8
+      ", askPermission=%s",
+      __func__, windowID, static_cast<uint8_t>(videoType),
+      static_cast<uint8_t>(audioType),
+      static_cast<uint8_t>(videoEnumerationType),
+      static_cast<uint8_t>(audioEnumerationType),
+      askPermission ? "true" : "false");
 
   RefPtr<MediaManager> self = this;
   return EnumerateDevicesImpl(windowID, videoType, audioType,
                               MediaSinkEnum::Other, videoEnumerationType,
                               audioEnumerationType)
       ->Then(
           GetCurrentThreadSerialEventTarget(), __func__,
           [self, windowID, c, windowListener, sourceListener, askPermission,
            prefs, isHTTPS, isHandlingUserInput, callID, principalInfo, isChrome,
            resistFingerprinting](RefPtr<MediaDeviceSetRefCnt>&& aDevices)
               -> RefPtr<StreamPromise> {
-            LOG(
-                ("GetUserMedia: post enumeration promise success callback "
-                 "starting"));
+            LOG("GetUserMedia: post enumeration promise success callback "
+                "starting");
             // Ensure that our windowID is still good.
             auto* globalWindow =
                 nsGlobalWindowInner::GetInnerWindowWithId(windowID);
             RefPtr<nsPIDOMWindowInner> window =
                 globalWindow ? globalWindow->AsInner() : nullptr;
             if (!window || !self->IsWindowListenerStillActive(windowListener)) {
-              LOG(("GetUserMedia: bad window (%" PRIu64 ") in post enumeration "
-                   "success callback!",
-                   windowID));
+              LOG("GetUserMedia: bad window (%" PRIu64
+                  ") in post enumeration success callback!",
+                  windowID);
               return StreamPromise::CreateAndReject(
                   MakeRefPtr<MediaMgrError>(MediaMgrError::Name::AbortError),
                   __func__);
             }
             // Apply any constraints. This modifies the passed-in list.
             return self->SelectSettings(c, isChrome, aDevices)
                 ->Then(
                     GetCurrentThreadSerialEventTarget(), __func__,
                     [self, windowID, c, windowListener, sourceListener,
                      askPermission, prefs, isHTTPS, isHandlingUserInput, callID,
                      principalInfo, isChrome, aDevices,
                      resistFingerprinting](const char* badConstraint) mutable {
-                      LOG(
-                          ("GetUserMedia: starting post enumeration promise2 "
-                           "success "
-                           "callback!"));
+                      LOG("GetUserMedia: starting post enumeration promise2 "
+                          "success "
+                          "callback!");
 
                       // Ensure that the window is still good.
                       auto* globalWindow =
                           nsGlobalWindowInner::GetInnerWindowWithId(windowID);
                       RefPtr<nsPIDOMWindowInner> window =
                           globalWindow ? globalWindow->AsInner() : nullptr;
                       if (!window ||
                           !self->IsWindowListenerStillActive(windowListener)) {
-                        LOG(("GetUserMedia: bad window (%" PRIu64
-                             ") in post enumeration "
-                             "success callback 2!",
-                             windowID));
+                        LOG("GetUserMedia: bad window (%" PRIu64
+                            ") in post enumeration "
+                            "success callback 2!",
+                            windowID);
                         return StreamPromise::CreateAndReject(
                             MakeRefPtr<MediaMgrError>(
                                 MediaMgrError::Name::AbortError),
                             __func__);
                       }
 
                       if (badConstraint) {
-                        LOG(
-                            ("GetUserMedia: bad constraint found in post "
-                             "enumeration promise2 "
-                             "success callback! Calling error handler!"));
+                        LOG("GetUserMedia: bad constraint found in post "
+                            "enumeration promise2 "
+                            "success callback! Calling error handler!");
                         nsString constraint;
                         constraint.AssignASCII(badConstraint);
                         return StreamPromise::CreateAndReject(
                             MakeRefPtr<MediaMgrError>(
                                 MediaMgrError::Name::OverconstrainedError,
                                 NS_LITERAL_STRING(""), constraint),
                             __func__);
                       }
                       if (!aDevices->Length()) {
-                        LOG(
-                            ("GetUserMedia: no devices found in post "
-                             "enumeration promise2 "
-                             "success callback! Calling error handler!"));
+                        LOG("GetUserMedia: no devices found in post "
+                            "enumeration promise2 "
+                            "success callback! Calling error handler!");
                         // When privacy.resistFingerprinting = true, no
                         // available device implies content script is requesting
                         // a fake device, so report NotAllowedError.
                         auto error = resistFingerprinting
                                          ? MediaMgrError::Name::NotAllowedError
                                          : MediaMgrError::Name::NotFoundError;
                         return StreamPromise::CreateAndReject(
                             MakeRefPtr<MediaMgrError>(error), __func__);
@@ -2855,29 +2841,27 @@ RefPtr<MediaManager::StreamPromise> Medi
                       }
 
 #ifdef MOZ_WEBRTC
                       EnableWebRtcLog();
 #endif
                       return p;
                     },
                     [](nsresult rv) {
-                      LOG(
-                          ("GetUserMedia: post enumeration SelectSettings "
-                           "failure callback called!"));
+                      LOG("GetUserMedia: post enumeration SelectSettings "
+                          "failure callback called!");
                       return StreamPromise::CreateAndReject(
                           MakeRefPtr<MediaMgrError>(
                               MediaMgrError::Name::AbortError),
                           __func__);
                     });
           },
           [](RefPtr<MediaMgrError>&& aError) {
-            LOG(
-                ("GetUserMedia: post enumeration EnumerateDevicesImpl "
-                 "failure callback called!"));
+            LOG("GetUserMedia: post enumeration EnumerateDevicesImpl "
+                "failure callback called!");
             return StreamPromise::CreateAndReject(std::move(aError), __func__);
           });
 }
 
 /* static */ void MediaManager::AnonymizeDevices(MediaDeviceSet& aDevices,
                                                  const nsACString& aOriginKey) {
   if (!aOriginKey.IsEmpty()) {
     for (RefPtr<MediaDevice>& device : aDevices) {
@@ -2962,23 +2946,23 @@ already_AddRefed<nsIWritableVariant> Med
 
 RefPtr<MediaManager::MediaDeviceSetPromise> MediaManager::EnumerateDevicesImpl(
     uint64_t aWindowId, MediaSourceEnum aVideoInputType,
     MediaSourceEnum aAudioInputType, MediaSinkEnum aAudioOutputType,
     DeviceEnumerationType aVideoInputEnumType,
     DeviceEnumerationType aAudioInputEnumType) {
   MOZ_ASSERT(NS_IsMainThread());
 
-  LOG(("%s: aWindowId=%" PRIu64 ", aVideoInputType=%" PRIu8
-       ", aAudioInputType=%" PRIu8 ", aVideoInputEnumType=%" PRIu8
-       ", aAudioInputEnumType=%" PRIu8,
-       __func__, aWindowId, static_cast<uint8_t>(aVideoInputType),
-       static_cast<uint8_t>(aAudioInputType),
-       static_cast<uint8_t>(aVideoInputEnumType),
-       static_cast<uint8_t>(aAudioInputEnumType)));
+  LOG("%s: aWindowId=%" PRIu64 ", aVideoInputType=%" PRIu8
+      ", aAudioInputType=%" PRIu8 ", aVideoInputEnumType=%" PRIu8
+      ", aAudioInputEnumType=%" PRIu8,
+      __func__, aWindowId, static_cast<uint8_t>(aVideoInputType),
+      static_cast<uint8_t>(aAudioInputType),
+      static_cast<uint8_t>(aVideoInputEnumType),
+      static_cast<uint8_t>(aAudioInputEnumType));
   nsPIDOMWindowInner* window =
       nsGlobalWindowInner::GetInnerWindowWithId(aWindowId)->AsInner();
 
   // To get a device list anonymized for a particular origin, we must:
   // 1. Get an origin-key (for either regular or private browsing)
   // 2. Get the raw devices list
   // 3. Anonymize the raw list with the origin-key.
 
@@ -3273,17 +3257,17 @@ MediaEngine* MediaManager::GetBackend(ui
 #endif
     mBackend->AddDeviceChangeCallback(this);
   }
   return mBackend;
 }
 
 void MediaManager::OnNavigation(uint64_t aWindowID) {
   MOZ_ASSERT(NS_IsMainThread());
-  LOG(("OnNavigation for %" PRIu64, aWindowID));
+  LOG("OnNavigation for %" PRIu64, aWindowID);
 
   // Stop the streams for this window. The runnables check this value before
   // making a call to content.
 
   nsTArray<nsString>* callIDs;
   if (mCallIds.Get(aWindowID, &callIDs)) {
     for (auto& callID : *callIDs) {
       mActiveCallbacks.Remove(callID);
@@ -3353,37 +3337,37 @@ void MediaManager::AddWindowID(uint64_t 
 }
 
 void MediaManager::RemoveWindowID(uint64_t aWindowId) {
   mActiveWindows.Remove(aWindowId);
 
   // get outer windowID
   auto* window = nsGlobalWindowInner::GetInnerWindowWithId(aWindowId);
   if (!window) {
-    LOG(("No inner window for %" PRIu64, aWindowId));
+    LOG("No inner window for %" PRIu64, aWindowId);
     return;
   }
 
   nsPIDOMWindowOuter* outer = window->AsInner()->GetOuterWindow();
   if (!outer) {
-    LOG(("No outer window for inner %" PRIu64, aWindowId));
+    LOG("No outer window for inner %" PRIu64, aWindowId);
     return;
   }
 
   uint64_t outerID = outer->WindowID();
 
   // Notify the UI that this window no longer has gUM active
   char windowBuffer[32];
   SprintfLiteral(windowBuffer, "%" PRIu64, outerID);
   nsString data = NS_ConvertUTF8toUTF16(windowBuffer);
 
   nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
   obs->NotifyObservers(nullptr, "recording-window-ended", data.get());
-  LOG(("Sent recording-window-ended for window %" PRIu64 " (outer %" PRIu64 ")",
-       aWindowId, outerID));
+  LOG("Sent recording-window-ended for window %" PRIu64 " (outer %" PRIu64 ")",
+      aWindowId, outerID);
 }
 
 bool MediaManager::IsWindowListenerStillActive(
     GetUserMediaWindowListener* aListener) {
   MOZ_DIAGNOSTIC_ASSERT(aListener);
   return aListener && aListener == GetWindowListener(aListener->WindowID());
 }
 
@@ -3510,36 +3494,35 @@ void MediaManager::Shutdown() {
     ShutdownTask(MediaManager* aManager, already_AddRefed<Runnable> aReply)
         : mozilla::Runnable("ShutdownTask"),
           mManager(aManager),
           mReply(aReply) {}
 
    private:
     NS_IMETHOD
     Run() override {
-      LOG(("MediaManager Thread Shutdown"));
+      LOG("MediaManager Thread Shutdown");
       MOZ_ASSERT(MediaManager::IsInMediaThread());
       // Must shutdown backend on MediaManager thread, since that's where we
       // started it from!
       {
         if (mManager->mBackend) {
           mManager->mBackend->Shutdown();  // ok to invoke multiple times
           mManager->mBackend->RemoveDeviceChangeCallback(mManager);
         }
       }
       mozilla::ipc::BackgroundChild::CloseForCurrentThread();
       // must explicitly do this before dispatching the reply, since the reply
       // may kill us with Stop()
       mManager->mBackend =
           nullptr;  // last reference, will invoke Shutdown() again
 
       if (NS_FAILED(NS_DispatchToMainThread(mReply.forget()))) {
-        LOG(
-            ("Will leak thread: DispatchToMainthread of reply runnable failed "
-             "in MediaManager shutdown"));
+        LOG("Will leak thread: DispatchToMainthread of reply runnable failed "
+            "in MediaManager shutdown");
       }
 
       return NS_OK;
     }
     RefPtr<MediaManager> mManager;
     RefPtr<Runnable> mReply;
   };
 
@@ -3554,19 +3537,18 @@ void MediaManager::Shutdown() {
   MOZ_ASSERT(this == sSingleton);
   RefPtr<MediaManager> that = this;
 
   // Release the backend (and call Shutdown()) from within the MediaManager
   // thread Don't use MediaManager::PostTask() because we're sHasShutdown=true
   // here!
   RefPtr<ShutdownTask> shutdown = new ShutdownTask(
       this, media::NewRunnableFrom([this, that]() mutable {
-        LOG(
-            ("MediaManager shutdown lambda running, releasing MediaManager "
-             "singleton and thread"));
+        LOG("MediaManager shutdown lambda running, releasing MediaManager "
+            "singleton and thread");
         if (mMediaThread) {
           mMediaThread->Stop();
         }
 
         // Remove async shutdown blocker
 
         nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase = GetShutdownPhase();
         shutdownPhase->RemoveBlocker(sSingleton->mShutdownBlocker);
@@ -3608,18 +3590,18 @@ nsresult MediaManager::Observe(nsISuppor
   MOZ_ASSERT(NS_IsMainThread());
 
   MediaMgrError::Name gumNoAccessError = MediaMgrError::Name::NotAllowedError;
 
   if (!strcmp(aTopic, NS_PREFBRANCH_PREFCHANGE_TOPIC_ID)) {
     nsCOMPtr<nsIPrefBranch> branch(do_QueryInterface(aSubject));
     if (branch) {
       GetPrefs(branch, NS_ConvertUTF16toUTF8(aData).get());
-      LOG(("%s: %dx%d @%dfps", __FUNCTION__, mPrefs.mWidth, mPrefs.mHeight,
-           mPrefs.mFPS));
+      LOG("%s: %dx%d @%dfps", __FUNCTION__, mPrefs.mWidth, mPrefs.mHeight,
+          mPrefs.mFPS);
     }
   } else if (!strcmp(aTopic, "last-pb-context-exited")) {
     // Clear memory of private-browsing-specific deviceIds. Fire and forget.
     media::SanitizeOriginKeys(0, true);
     return NS_OK;
   } else if (!strcmp(aTopic, "getUserMedia:got-device-permission")) {
     MOZ_ASSERT(aSubject);
     nsCOMPtr<nsIRunnable> task = do_QueryInterface(aSubject);
@@ -3711,25 +3693,25 @@ nsresult MediaManager::Observe(nsISuppor
     nsresult rv;
     // may be windowid or screen:windowid
     nsDependentString data(aData);
     if (Substring(data, 0, strlen("screen:")).EqualsLiteral("screen:")) {
       uint64_t windowID = PromiseFlatString(Substring(data, strlen("screen:")))
                               .ToInteger64(&rv);
       MOZ_ASSERT(NS_SUCCEEDED(rv));
       if (NS_SUCCEEDED(rv)) {
-        LOG(("Revoking Screen/windowCapture access for window %" PRIu64,
-             windowID));
+        LOG("Revoking Screen/windowCapture access for window %" PRIu64,
+            windowID);
         StopScreensharing(windowID);
       }
     } else {
       uint64_t windowID = nsString(aData).ToInteger64(&rv);
       MOZ_ASSERT(NS_SUCCEEDED(rv));
       if (NS_SUCCEEDED(rv)) {
-        LOG(("Revoking MediaCapture access for window %" PRIu64, windowID));
+        LOG("Revoking MediaCapture access for window %" PRIu64, windowID);
         OnNavigation(windowID);
       }
     }
     return NS_OK;
   }
 
   return NS_OK;
 }
@@ -3811,38 +3793,38 @@ MediaManager::MediaCaptureWindowState(ns
   *aCamera = FromCaptureState(camera);
   *aMicrophone = FromCaptureState(microphone);
   *aScreen = FromCaptureState(screen);
   *aWindow = FromCaptureState(window);
   *aApplication = FromCaptureState(application);
   *aBrowser = FromCaptureState(browser);
 
 #ifdef DEBUG
-  LOG(("%s: window %" PRIu64 " capturing %s %s %s %s %s %s", __FUNCTION__,
-       piWin ? piWin->WindowID() : -1,
-       *aCamera == nsIMediaManagerService::STATE_CAPTURE_ENABLED
-           ? "camera (enabled)"
-           : (*aCamera == nsIMediaManagerService::STATE_CAPTURE_DISABLED
-                  ? "camera (disabled)"
-                  : ""),
-       *aMicrophone == nsIMediaManagerService::STATE_CAPTURE_ENABLED
-           ? "microphone (enabled)"
-           : (*aMicrophone == nsIMediaManagerService::STATE_CAPTURE_DISABLED
-                  ? "microphone (disabled)"
-                  : ""),
-       *aScreen ? "screenshare" : "", *aWindow ? "windowshare" : "",
-       *aApplication ? "appshare" : "", *aBrowser ? "browsershare" : ""));
+  LOG("%s: window %" PRIu64 " capturing %s %s %s %s %s %s", __FUNCTION__,
+      piWin ? piWin->WindowID() : -1,
+      *aCamera == nsIMediaManagerService::STATE_CAPTURE_ENABLED
+          ? "camera (enabled)"
+          : (*aCamera == nsIMediaManagerService::STATE_CAPTURE_DISABLED
+                 ? "camera (disabled)"
+                 : ""),
+      *aMicrophone == nsIMediaManagerService::STATE_CAPTURE_ENABLED
+          ? "microphone (enabled)"
+          : (*aMicrophone == nsIMediaManagerService::STATE_CAPTURE_DISABLED
+                 ? "microphone (disabled)"
+                 : ""),
+      *aScreen ? "screenshare" : "", *aWindow ? "windowshare" : "",
+      *aApplication ? "appshare" : "", *aBrowser ? "browsershare" : "");
 #endif
   return NS_OK;
 }
 
 NS_IMETHODIMP
 MediaManager::SanitizeDeviceIds(int64_t aSinceWhen) {
   MOZ_ASSERT(NS_IsMainThread());
-  LOG(("%s: sinceWhen = %" PRId64, __FUNCTION__, aSinceWhen));
+  LOG("%s: sinceWhen = %" PRId64, __FUNCTION__, aSinceWhen);
 
   media::SanitizeOriginKeys(aSinceWhen, false);  // we fire and forget
   return NS_OK;
 }
 
 void MediaManager::StopScreensharing(uint64_t aWindowID) {
   // We need to stop window/screensharing for all streams in all innerwindows
   // that correspond to that outerwindow.
@@ -3977,34 +3959,33 @@ bool MediaManager::IsActivelyCapturingOr
 SourceListener::SourceListener()
     : mStopped(false),
       mRemoved(false),
       mMainThreadCheck(nullptr),
       mPrincipalHandle(PRINCIPAL_HANDLE_NONE),
       mWindowListener(nullptr) {}
 
 void SourceListener::Register(GetUserMediaWindowListener* aListener) {
-  LOG(("SourceListener %p registering with window listener %p", this,
-       aListener));
+  LOG("SourceListener %p registering with window listener %p", this, aListener);
 
   MOZ_ASSERT(aListener, "No listener");
   MOZ_ASSERT(!mWindowListener, "Already registered");
   MOZ_ASSERT(!Activated(), "Already activated");
 
   mPrincipalHandle = aListener->GetPrincipalHandle();
   mWindowListener = aListener;
 }
 
 void SourceListener::Activate(SourceMediaStream* aStream,
                               MediaDevice* aAudioDevice,
                               MediaDevice* aVideoDevice) {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
 
-  LOG(("SourceListener %p activating audio=%p video=%p", this, aAudioDevice,
-       aVideoDevice));
+  LOG("SourceListener %p activating audio=%p video=%p", this, aAudioDevice,
+      aVideoDevice);
 
   MOZ_ASSERT(!mStopped, "Cannot activate stopped source listener");
   MOZ_ASSERT(!Activated(), "Already activated");
 
   mMainThreadCheck = GetCurrentVirtualThread();
   mStream = aStream;
   if (aAudioDevice) {
     mAudioDeviceState = MakeUnique<DeviceState>(
@@ -4084,17 +4065,17 @@ SourceListener::InitializeAsync() {
                                   __func__);
                    return;
                  }
                }
 
                // Start() queued the tracks to be added synchronously to avoid
                // races
                stream->FinishAddTracks();
-               LOG(("started all sources"));
+               LOG("started all sources");
 
                aHolder.Resolve(true, __func__);
              })
       ->Then(GetMainThreadSerialEventTarget(), __func__,
              [self = RefPtr<SourceListener>(this), this]() {
                if (mStopped) {
                  // We were shut down during the async init
                  return SourceListenerPromise::CreateAndResolve(true, __func__);
@@ -4151,17 +4132,17 @@ SourceListener::InitializeAsync() {
 
 void SourceListener::Stop() {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
 
   if (mStopped) {
     return;
   }
 
-  LOG(("SourceListener %p stopping", this));
+  LOG("SourceListener %p stopping", this);
 
   // StopSharing() has some special logic, at least for audio capture.
   // It must be called when all tracks have stopped, before setting mStopped.
   StopSharing();
 
   mStopped = true;
 
   MOZ_ASSERT(Activated(), "There are no devices or any source stream to stop");
@@ -4184,17 +4165,17 @@ void SourceListener::Remove() {
   if (mVideoDeviceState) {
     mVideoDeviceState->mDisableTimer->Cancel();
   }
 
   if (!mStream || mRemoved) {
     return;
   }
 
-  LOG(("SourceListener %p removed on purpose", this));
+  LOG("SourceListener %p removed on purpose", this);
   mRemoved = true;  // RemoveListener is async, avoid races
   mWindowListener = nullptr;
 
   // If it's destroyed, don't call - listener will be removed and we'll be
   // notified!
   if (!mStream->IsDestroyed()) {
     // We disable pulling before removing so we don't risk having live tracks
     // without a listener attached - that wouldn't produce data and would be
@@ -4219,35 +4200,35 @@ void SourceListener::Remove() {
 
 void SourceListener::StopTrack(TrackID aTrackID) {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
   MOZ_ASSERT(Activated(), "No device to stop");
   MOZ_ASSERT(aTrackID == kAudioTrack || aTrackID == kVideoTrack,
              "Unknown track id");
   DeviceState& state = GetDeviceStateFor(aTrackID);
 
-  LOG(("SourceListener %p stopping %s track %d", this,
-       aTrackID == kAudioTrack ? "audio" : "video", aTrackID));
+  LOG("SourceListener %p stopping %s track %d", this,
+      aTrackID == kAudioTrack ? "audio" : "video", aTrackID);
 
   if (state.mStopped) {
     // device already stopped.
     return;
   }
   state.mStopped = true;
 
   state.mDisableTimer->Cancel();
 
   MediaManager::PostTask(NewTaskFrom([device = state.mDevice]() {
     device->Stop();
     device->Deallocate();
   }));
 
   if ((!mAudioDeviceState || mAudioDeviceState->mStopped) &&
       (!mVideoDeviceState || mVideoDeviceState->mStopped)) {
-    LOG(("SourceListener %p this was the last track stopped", this));
+    LOG("SourceListener %p this was the last track stopped", this);
     Stop();
   }
 
   MOZ_ASSERT(mWindowListener, "Should still have window listener");
   mWindowListener->ChromeAffectingStateChanged();
 }
 
 void SourceListener::GetSettingsFor(
@@ -4261,19 +4242,19 @@ void SourceListener::SetEnabledFor(Track
   MOZ_ASSERT(Activated(), "No device to set enabled state for");
   MOZ_ASSERT(aTrackID == kAudioTrack || aTrackID == kVideoTrack,
              "Unknown track id");
 
   if (mRemoved) {
     return;
   }
 
-  LOG(("SourceListener %p %s %s track %d", this,
-       aEnable ? "enabling" : "disabling",
-       aTrackID == kAudioTrack ? "audio" : "video", aTrackID));
+  LOG("SourceListener %p %s %s track %d", this,
+      aEnable ? "enabling" : "disabling",
+      aTrackID == kAudioTrack ? "audio" : "video", aTrackID);
 
   DeviceState& state = GetDeviceStateFor(aTrackID);
 
   state.mTrackEnabled = aEnable;
 
   if (state.mStopped) {
     // Device terminally stopped. Updating device state is pointless.
     return;
@@ -4321,19 +4302,19 @@ void SourceListener::SetEnabledFor(Track
       ->Then(
           GetMainThreadSerialEventTarget(), __func__,
           [self, this, &state, aTrackID, aEnable]() mutable {
             MOZ_ASSERT(state.mDeviceEnabled != aEnable,
                        "Device operation hasn't started");
             MOZ_ASSERT(state.mOperationInProgress,
                        "It's our responsibility to reset the inProgress state");
 
-            LOG(("SourceListener %p %s %s track %d - starting device operation",
-                 this, aEnable ? "enabling" : "disabling",
-                 aTrackID == kAudioTrack ? "audio" : "video", aTrackID));
+            LOG("SourceListener %p %s %s track %d - starting device operation",
+                this, aEnable ? "enabling" : "disabling",
+                aTrackID == kAudioTrack ? "audio" : "video", aTrackID);
 
             if (mRemoved) {
               // Listener was removed between timer resolving and this runnable.
               return DeviceOperationPromise::CreateAndResolve(NS_ERROR_ABORT,
                                                               __func__);
             }
 
             if (state.mStopped) {
@@ -4376,20 +4357,20 @@ void SourceListener::SetEnabledFor(Track
             state.mOperationInProgress = false;
 
             if (state.mStopped) {
               // Device was stopped on main thread during the operation. Nothing
               // to do.
               return;
             }
 
-            LOG(("SourceListener %p %s %s track %d %s", this,
-                 aEnable ? "enabling" : "disabling",
-                 aTrackID == kAudioTrack ? "audio" : "video", aTrackID,
-                 NS_SUCCEEDED(aResult) ? "succeeded" : "failed"));
+            LOG("SourceListener %p %s %s track %d %s", this,
+                aEnable ? "enabling" : "disabling",
+                aTrackID == kAudioTrack ? "audio" : "video", aTrackID,
+                NS_SUCCEEDED(aResult) ? "succeeded" : "failed");
 
             if (NS_FAILED(aResult) && aResult != NS_ERROR_ABORT) {
               // This path handles errors from starting or stopping the device.
               // NS_ERROR_ABORT are for cases where *we* aborted. They need
               // graceful handling.
               if (aEnable) {
                 // Starting the device failed. Stopping the track here will make
                 // the MediaStreamTrack end after a pass through the
@@ -4431,17 +4412,17 @@ void SourceListener::SetEnabledFor(Track
 void SourceListener::StopSharing() {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_RELEASE_ASSERT(mWindowListener);
 
   if (mStopped) {
     return;
   }
 
-  LOG(("SourceListener %p StopSharing", this));
+  LOG("SourceListener %p StopSharing", this);
 
   if (mVideoDeviceState && (mVideoDeviceState->mDevice->GetMediaSource() ==
                                 MediaSourceEnum::Screen ||
                             mVideoDeviceState->mDevice->GetMediaSource() ==
                                 MediaSourceEnum::Application ||
                             mVideoDeviceState->mDevice->GetMediaSource() ==
                                 MediaSourceEnum::Window)) {
     // We want to stop the whole stream if there's no audio;
@@ -4472,17 +4453,17 @@ void SourceListener::Pull(TrackID aTrack
                           StreamTime aDesiredTime) {
   DeviceState& state = GetDeviceStateFor(aTrackID);
   state.mDevice->Pull(mStream, aTrackID, aEndOfAppendedData, aDesiredTime,
                       mPrincipalHandle);
 }
 
 void SourceListener::NotifyRemoved(TrackID aTrackID) {
   MOZ_ASSERT(NS_IsMainThread());
-  LOG(("Track %d for SourceListener %p removed", aTrackID, this));
+  LOG("Track %d for SourceListener %p removed", aTrackID, this);
 
   StopTrack(aTrackID);
 
   if (!mStopped) {
     // There are more live tracks that need to be stopped before removal.
     return;
   }
 
@@ -4551,18 +4532,18 @@ CaptureState SourceListener::CapturingSo
 RefPtr<SourceListener::SourceListenerPromise>
 SourceListener::ApplyConstraintsToTrack(
     TrackID aTrackID, const MediaTrackConstraints& aConstraints,
     dom::CallerType aCallerType) {
   MOZ_ASSERT(NS_IsMainThread());
   DeviceState& state = GetDeviceStateFor(aTrackID);
 
   if (mStopped || state.mStopped) {
-    LOG(("gUM %s track %d applyConstraints, but source is stopped",
-         aTrackID == kAudioTrack ? "audio" : "video", aTrackID));
+    LOG("gUM %s track %d applyConstraints, but source is stopped",
+        aTrackID == kAudioTrack ? "audio" : "video", aTrackID);
     return SourceListenerPromise::CreateAndResolve(false, __func__);
   }
 
   MediaManager* mgr = MediaManager::GetIfExists();
   if (!mgr) {
     return SourceListenerPromise::CreateAndResolve(false, __func__);
   }
 
@@ -4583,18 +4564,18 @@ SourceListener::ApplyConstraintsToTrack(
               nsTArray<RefPtr<MediaDevice>> devices;
               devices.AppendElement(device);
               badConstraint = MediaConstraintsHelper::SelectSettings(
                   NormalizedConstraints(aConstraints), devices, isChrome);
             }
           } else {
             // Unexpected. ApplyConstraints* cannot fail with any other error.
             badConstraint = "";
-            LOG(("ApplyConstraintsToTrack-Task: Unexpected fail %" PRIx32,
-                 static_cast<uint32_t>(rv)));
+            LOG("ApplyConstraintsToTrack-Task: Unexpected fail %" PRIx32,
+                static_cast<uint32_t>(rv));
           }
 
           aHolder.Reject(
               MakeRefPtr<MediaMgrError>(
                   MediaMgrError::Name::OverconstrainedError,
                   NS_LITERAL_STRING(""), NS_ConvertASCIItoUTF16(badConstraint)),
               __func__);
           return;
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -57,18 +57,16 @@ class PrincipalInfo;
 }
 
 class AllocationHandle;
 class GetUserMediaTask;
 class GetUserMediaWindowListener;
 class MediaManager;
 class SourceListener;
 
-LogModule* GetMediaManagerLog();
-
 class MediaDevice : public nsIMediaDevice {
  public:
   NS_DECL_THREADSAFE_ISUPPORTS
   NS_DECL_NSIMEDIADEVICE
 
   explicit MediaDevice(const RefPtr<MediaEngineSource>& aSource,
                        const nsString& aName, const nsString& aID,
                        const nsString& aRawID);
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -13,22 +13,22 @@
 #include "mozilla/RefPtr.h"
 #include "nsIPrefService.h"
 #include "Tracing.h"
 #include "VideoFrameUtils.h"
 #include "VideoUtils.h"
 #include "webrtc/common_video/include/video_frame_buffer.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 
-mozilla::LogModule* GetMediaManagerLog();
-#define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
-#define LOGFRAME(msg) \
-  MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
+namespace mozilla {
 
-namespace mozilla {
+extern LazyLogModule gMediaManagerLog;
+#define LOG(...) MOZ_LOG(gMediaManagerLog, LogLevel::Debug, (__VA_ARGS__))
+#define LOG_FRAME(...) \
+  MOZ_LOG(gMediaManagerLog, LogLevel::Verbose, (__VA_ARGS__))
 
 using dom::ConstrainLongRange;
 using dom::MediaSourceEnum;
 using dom::MediaTrackConstraints;
 using dom::MediaTrackConstraintSet;
 using dom::MediaTrackSettings;
 using dom::VideoFacingModeEnum;
 
@@ -47,37 +47,37 @@ MediaEngineRemoteVideoSource::MediaEngin
   MOZ_ASSERT(aMediaSource != MediaSourceEnum::Other);
   mSettings->mWidth.Construct(0);
   mSettings->mHeight.Construct(0);
   mSettings->mFrameRate.Construct(0);
   Init();
 }
 
 void MediaEngineRemoteVideoSource::Init() {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   char deviceName[kMaxDeviceNameLength];
   char uniqueId[kMaxUniqueIdLength];
   if (camera::GetChildAndCall(&camera::CamerasChild::GetCaptureDevice,
                               mCapEngine, mCaptureIndex, deviceName,
                               kMaxDeviceNameLength, uniqueId,
                               kMaxUniqueIdLength, nullptr)) {
-    LOG(("Error initializing RemoteVideoSource (GetCaptureDevice)"));
+    LOG("Error initializing RemoteVideoSource (GetCaptureDevice)");
     return;
   }
 
   SetName(NS_ConvertUTF8toUTF16(deviceName));
   SetUUID(uniqueId);
 
   mInitDone = true;
 }
 
 void MediaEngineRemoteVideoSource::Shutdown() {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   if (!mInitDone) {
     // Already shut down
     return;
   }
 
   // Allocate always returns a null AllocationHandle.
@@ -89,17 +89,17 @@ void MediaEngineRemoteVideoSource::Shutd
     Deallocate(nullptr);
   }
   MOZ_ASSERT(mState == kReleased);
 
   mInitDone = false;
 }
 
 void MediaEngineRemoteVideoSource::SetName(nsString aName) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   mDeviceName = std::move(aName);
   bool hasFacingMode = false;
   VideoFacingModeEnum facingMode = VideoFacingModeEnum::User;
 
   // Set facing mode based on device name.
 #if defined(ANDROID)
@@ -161,58 +161,58 @@ nsCString MediaEngineRemoteVideoSource::
   return mUniqueId;
 }
 
 nsresult MediaEngineRemoteVideoSource::Allocate(
     const MediaTrackConstraints& aConstraints, const MediaEnginePrefs& aPrefs,
     const nsString& aDeviceId,
     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
     AllocationHandle** aOutHandle, const char** aOutBadConstraint) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kReleased);
 
   if (!mInitDone) {
-    LOG(("Init not done"));
+    LOG("Init not done");
     return NS_ERROR_FAILURE;
   }
 
   NormalizedConstraints constraints(aConstraints);
   webrtc::CaptureCapability newCapability;
-  LOG(("ChooseCapability(kFitness) for mCapability (Allocate) ++"));
+  LOG("ChooseCapability(kFitness) for mCapability (Allocate) ++");
   if (!ChooseCapability(constraints, aPrefs, aDeviceId, newCapability,
                         kFitness)) {
     *aOutBadConstraint =
         MediaConstraintsHelper::FindBadConstraint(constraints, this, aDeviceId);
     return NS_ERROR_FAILURE;
   }
-  LOG(("ChooseCapability(kFitness) for mCapability (Allocate) --"));
+  LOG("ChooseCapability(kFitness) for mCapability (Allocate) --");
 
   if (camera::GetChildAndCall(&camera::CamerasChild::AllocateCaptureDevice,
                               mCapEngine, mUniqueId.get(), kMaxUniqueIdLength,
                               mCaptureIndex, aPrincipalInfo)) {
     return NS_ERROR_FAILURE;
   }
 
   *aOutHandle = nullptr;
 
   {
     MutexAutoLock lock(mMutex);
     mState = kAllocated;
     mCapability = newCapability;
   }
 
-  LOG(("Video device %d allocated", mCaptureIndex));
+  LOG("Video device %d allocated", mCaptureIndex);
   return NS_OK;
 }
 
 nsresult MediaEngineRemoteVideoSource::Deallocate(
     const RefPtr<const AllocationHandle>& aHandle) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kStopped || mState == kAllocated);
 
   if (mStream && IsTrackIDExplicit(mTrackID)) {
     mStream->EndTrack(mTrackID);
   }
 
@@ -226,30 +226,30 @@ nsresult MediaEngineRemoteVideoSource::D
   }
 
   // Stop() has stopped capture synchronously on the media thread before we get
   // here, so there are no longer any callbacks on an IPC thread accessing
   // mImageContainer or mRescalingBufferPool.
   mImageContainer = nullptr;
   mRescalingBufferPool.Release();
 
-  LOG(("Video device %d deallocated", mCaptureIndex));
+  LOG("Video device %d deallocated", mCaptureIndex);
 
   if (camera::GetChildAndCall(&camera::CamerasChild::ReleaseCaptureDevice,
                               mCapEngine, mCaptureIndex)) {
     MOZ_ASSERT_UNREACHABLE("Couldn't release allocated device");
   }
   return NS_OK;
 }
 
 nsresult MediaEngineRemoteVideoSource::SetTrack(
     const RefPtr<const AllocationHandle>& aHandle,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
     const PrincipalHandle& aPrincipal) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated);
   MOZ_ASSERT(!mStream);
   MOZ_ASSERT(mTrackID == TRACK_NONE);
   MOZ_ASSERT(aStream);
   MOZ_ASSERT(IsTrackIDExplicit(aTrackID));
 
@@ -266,34 +266,34 @@ nsresult MediaEngineRemoteVideoSource::S
   }
   aStream->AddTrack(aTrackID, new VideoSegment(),
                     SourceMediaStream::ADDTRACK_QUEUED);
   return NS_OK;
 }
 
 nsresult MediaEngineRemoteVideoSource::Start(
     const RefPtr<const AllocationHandle>& aHandle) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated || mState == kStopped);
   MOZ_ASSERT(mInitDone);
   MOZ_ASSERT(mStream);
   MOZ_ASSERT(IsTrackIDExplicit(mTrackID));
 
   {
     MutexAutoLock lock(mMutex);
     mState = kStarted;
   }
 
   mSettingsUpdatedByFrame->mValue = false;
 
   if (camera::GetChildAndCall(&camera::CamerasChild::StartCapture, mCapEngine,
                               mCaptureIndex, mCapability, this)) {
-    LOG(("StartCapture failed"));
+    LOG("StartCapture failed");
     MutexAutoLock lock(mMutex);
     mState = kStopped;
     return NS_ERROR_FAILURE;
   }
 
   NS_DispatchToMainThread(NS_NewRunnableFunction(
       "MediaEngineRemoteVideoSource::SetLastCapability",
       [settings = mSettings, updated = mSettingsUpdatedByFrame,
@@ -321,28 +321,28 @@ nsresult MediaEngineRemoteVideoSource::S
         settings->mFrameRate.Value() = cap.maxFPS;
       }));
 
   return NS_OK;
 }
 
 nsresult MediaEngineRemoteVideoSource::FocusOnSelectedSource(
     const RefPtr<const AllocationHandle>& aHandle) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   int result;
   result = camera::GetChildAndCall(&camera::CamerasChild::FocusOnSelectedSource,
                                    mCapEngine, mCaptureIndex);
   return result == 0 ? NS_OK : NS_ERROR_FAILURE;
 }
 
 nsresult MediaEngineRemoteVideoSource::Stop(
     const RefPtr<const AllocationHandle>& aHandle) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   if (mState == kStopped || mState == kAllocated) {
     return NS_OK;
   }
 
   MOZ_ASSERT(mState == kStarted);
 
@@ -364,67 +364,65 @@ nsresult MediaEngineRemoteVideoSource::S
 
   return NS_OK;
 }
 
 nsresult MediaEngineRemoteVideoSource::Reconfigure(
     const RefPtr<AllocationHandle>& aHandle,
     const MediaTrackConstraints& aConstraints, const MediaEnginePrefs& aPrefs,
     const nsString& aDeviceId, const char** aOutBadConstraint) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mInitDone);
 
   NormalizedConstraints constraints(aConstraints);
   webrtc::CaptureCapability newCapability;
-  LOG(("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) ++"));
+  LOG("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) ++");
   if (!ChooseCapability(constraints, aPrefs, aDeviceId, newCapability,
                         kFitness)) {
     *aOutBadConstraint =
         MediaConstraintsHelper::FindBadConstraint(constraints, this, aDeviceId);
     return NS_ERROR_INVALID_ARG;
   }
-  LOG(("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) --"));
+  LOG("ChooseCapability(kFitness) for mTargetCapability (Reconfigure) --");
 
   if (mCapability == newCapability) {
     return NS_OK;
   }
 
   bool started = mState == kStarted;
   if (started) {
     // Allocate always returns a null AllocationHandle.
     // We can safely pass nullptr below.
     nsresult rv = Stop(nullptr);
     if (NS_WARN_IF(NS_FAILED(rv))) {
       nsAutoCString name;
       GetErrorName(rv, name);
-      LOG(
-          ("Video source %p for video device %d Reconfigure() failed "
-           "unexpectedly in Stop(). rv=%s",
-           this, mCaptureIndex, name.Data()));
+      LOG("Video source %p for video device %d Reconfigure() failed "
+          "unexpectedly in Stop(). rv=%s",
+          this, mCaptureIndex, name.Data());
       return NS_ERROR_UNEXPECTED;
     }
   }
 
   {
     MutexAutoLock lock(mMutex);
     // Start() applies mCapability on the device.
     mCapability = newCapability;
   }
 
   if (started) {
     nsresult rv = Start(nullptr);
     if (NS_WARN_IF(NS_FAILED(rv))) {
       nsAutoCString name;
       GetErrorName(rv, name);
-      LOG(
-          ("Video source %p for video device %d Reconfigure() failed "
-           "unexpectedly in Start(). rv=%s",
-           this, mCaptureIndex, name.Data()));
+      LOG("Video source %p for video device %d Reconfigure() failed "
+          "unexpectedly in Start(). rv=%s",
+          this, mCaptureIndex, name.Data());
       return NS_ERROR_UNEXPECTED;
     }
   }
 
   return NS_OK;
 }
 
 size_t MediaEngineRemoteVideoSource::NumCapabilities() const {
@@ -609,22 +607,22 @@ int MediaEngineRemoteVideoSource::Delive
     MOZ_ASSERT_UNREACHABLE(
         "We might fail to allocate a buffer, but with this "
         "being a recycling container that shouldn't happen");
     return 0;
   }
 
 #ifdef DEBUG
   static uint32_t frame_num = 0;
-  LOGFRAME(
-      ("frame %d (%dx%d)->(%dx%d); rotation %d, timeStamp %u, "
-       "ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
-       frame_num++, aProps.width(), aProps.height(), dst_width, dst_height,
-       aProps.rotation(), aProps.timeStamp(), aProps.ntpTimeMs(),
-       aProps.renderTimeMs()));
+  LOG_FRAME(
+      "frame %d (%dx%d)->(%dx%d); rotation %d, timeStamp %u, ntpTimeMs %" PRIu64
+      ", renderTimeMs %" PRIu64,
+      frame_num++, aProps.width(), aProps.height(), dst_width, dst_height,
+      aProps.rotation(), aProps.timeStamp(), aProps.ntpTimeMs(),
+      aProps.renderTimeMs());
 #endif
 
   if (mImageSize.width != dst_width || mImageSize.height != dst_height) {
     NS_DispatchToMainThread(NS_NewRunnableFunction(
         "MediaEngineRemoteVideoSource::FrameSizeChange",
         [settings = mSettings, updated = mSettingsUpdatedByFrame, dst_width,
          dst_height]() mutable {
           settings->mWidth.Value() = dst_width;
@@ -768,36 +766,36 @@ uint32_t MediaEngineRemoteVideoSource::G
 
 static void LogCapability(const char* aHeader,
                           const webrtc::CaptureCapability& aCapability,
                           uint32_t aDistance) {
   static const char* const codec[] = {"VP8",           "VP9",          "H264",
                                       "I420",          "RED",          "ULPFEC",
                                       "Generic codec", "Unknown codec"};
 
-  LOG(("%s: %4u x %4u x %2u maxFps, %s. Distance = %" PRIu32, aHeader,
-       aCapability.width, aCapability.height, aCapability.maxFPS,
-       codec[std::min(std::max(uint32_t(0), uint32_t(aCapability.videoType)),
-                      uint32_t(sizeof(codec) / sizeof(*codec) - 1))],
-       aDistance));
+  LOG("%s: %4u x %4u x %2u maxFps, %s. Distance = %" PRIu32, aHeader,
+      aCapability.width, aCapability.height, aCapability.maxFPS,
+      codec[std::min(std::max(uint32_t(0), uint32_t(aCapability.videoType)),
+                     uint32_t(sizeof(codec) / sizeof(*codec) - 1))],
+      aDistance);
 }
 
 bool MediaEngineRemoteVideoSource::ChooseCapability(
     const NormalizedConstraints& aConstraints, const MediaEnginePrefs& aPrefs,
     const nsString& aDeviceId, webrtc::CaptureCapability& aCapability,
     const DistanceCalculation aCalculate) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
-  if (MOZ_LOG_TEST(GetMediaManagerLog(), LogLevel::Debug)) {
-    LOG(("ChooseCapability: prefs: %dx%d @%dfps", aPrefs.GetWidth(),
-         aPrefs.GetHeight(), aPrefs.mFPS));
+  if (MOZ_LOG_TEST(gMediaManagerLog, LogLevel::Debug)) {
+    LOG("ChooseCapability: prefs: %dx%d @%dfps", aPrefs.GetWidth(),
+        aPrefs.GetHeight(), aPrefs.mFPS);
     MediaConstraintsHelper::LogConstraints(aConstraints);
     if (!aConstraints.mAdvanced.empty()) {
-      LOG(("Advanced array[%zu]:", aConstraints.mAdvanced.size()));
+      LOG("Advanced array[%zu]:", aConstraints.mAdvanced.size());
       for (auto& advanced : aConstraints.mAdvanced) {
         MediaConstraintsHelper::LogConstraints(advanced);
       }
     }
   }
 
   switch (mMediaSource) {
     case MediaSourceEnum::Screen:
@@ -888,18 +886,18 @@ bool MediaEngineRemoteVideoSource::Choos
     if (candidate.mDistance == UINT32_MAX) {
       candidateSet.RemoveElementAt(i);
     } else {
       ++i;
     }
   }
 
   if (candidateSet.IsEmpty()) {
-    LOG(("failed to find capability match from %zu choices",
-         candidateSet.Length()));
+    LOG("failed to find capability match from %zu choices",
+        candidateSet.Length());
     return false;
   }
 
   // Filter further with all advanced constraints (that don't overconstrain).
 
   for (const auto& cs : aConstraints.mAdvanced) {
     nsTArray<CapabilityCandidate> rejects;
     for (size_t i = 0; i < candidateSet.Length();) {
@@ -947,17 +945,17 @@ bool MediaEngineRemoteVideoSource::Choos
 }
 
 void MediaEngineRemoteVideoSource::GetSettings(
     MediaTrackSettings& aOutSettings) const {
   aOutSettings = *mSettings;
 }
 
 void MediaEngineRemoteVideoSource::Refresh(int aIndex) {
-  LOG((__PRETTY_FUNCTION__));
+  LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   // NOTE: mCaptureIndex might have changed when allocated!
   // Use aIndex to update information, but don't change mCaptureIndex!!
   // Caller looked up this source by uniqueId, so it shouldn't change
   char deviceName[kMaxDeviceNameLength];
   char uniqueId[kMaxUniqueIdLength];
 
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -29,24 +29,21 @@
 using namespace webrtc;
 
 // These are restrictions from the webrtc.org code
 #define MAX_CHANNELS 2
 #define MAX_SAMPLING_FREQ 48000  // Hz - multiple of 100
 
 namespace mozilla {
 
-#ifdef LOG
-#undef LOG
-#endif
-
-LogModule* GetMediaManagerLog();
-#define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
-#define LOG_FRAMES(msg) \
-  MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
+extern LazyLogModule gMediaManagerLog;
+#define LOG(...) MOZ_LOG(gMediaManagerLog, LogLevel::Debug, (__VA_ARGS__))
+#define LOG_FRAME(...) \
+  MOZ_LOG(gMediaManagerLog, LogLevel::Verbose, (__VA_ARGS__))
+#define LOG_ERROR(...) MOZ_LOG(gMediaManagerLog, LogLevel::Error, (__VA_ARGS__))
 
 /**
  * WebRTC Microphone MediaEngineSource.
  */
 
 MediaEngineWebRTCMicrophoneSource::MediaEngineWebRTCMicrophoneSource(
     RefPtr<AudioDeviceInfo> aInfo, const nsString& aDeviceName,
     const nsCString& aDeviceUUID, uint32_t aMaxChannelCount,
@@ -139,48 +136,48 @@ nsresult MediaEngineWebRTCMicrophoneSour
   }
 
   // Get the number of channels asked for by content, and clamp it between the
   // pref and the maximum number of channels that the device supports.
   prefs.mChannels =
       c.mChannelCount.Get(std::min(aInPrefs.mChannels, maxChannels));
   prefs.mChannels = std::max(1, std::min(prefs.mChannels, maxChannels));
 
-  LOG(("Audio config: aec: %d, agc: %d, noise: %d, channels: %d",
-       prefs.mAecOn ? prefs.mAec : -1, prefs.mAgcOn ? prefs.mAgc : -1,
-       prefs.mNoiseOn ? prefs.mNoise : -1, prefs.mChannels));
+  LOG("Audio config: aec: %d, agc: %d, noise: %d, channels: %d",
+      prefs.mAecOn ? prefs.mAec : -1, prefs.mAgcOn ? prefs.mAgc : -1,
+      prefs.mNoiseOn ? prefs.mNoise : -1, prefs.mChannels);
 
   *aOutPrefs = prefs;
 
   return NS_OK;
 }
 
 nsresult MediaEngineWebRTCMicrophoneSource::Reconfigure(
     const RefPtr<AllocationHandle>&,
     const dom::MediaTrackConstraints& aConstraints,
     const MediaEnginePrefs& aPrefs, const nsString& /* aDeviceId */,
     const char** aOutBadConstraint) {
   AssertIsOnOwningThread();
   MOZ_ASSERT(mStream);
 
-  LOG(("Mic source %p Reconfigure ", this));
+  LOG("Mic source %p Reconfigure ", this);
 
   NormalizedConstraints constraints(aConstraints);
   MediaEnginePrefs outputPrefs;
   nsresult rv =
       EvaluateSettings(constraints, aPrefs, &outputPrefs, aOutBadConstraint);
   if (NS_FAILED(rv)) {
     if (aOutBadConstraint) {
       return NS_ERROR_INVALID_ARG;
     }
 
     nsAutoCString name;
     GetErrorName(rv, name);
-    LOG(("Mic source %p Reconfigure() failed unexpectedly. rv=%s", this,
-         name.Data()));
+    LOG("Mic source %p Reconfigure() failed unexpectedly. rv=%s", this,
+        name.Data());
     Stop(nullptr);
     return NS_ERROR_UNEXPECTED;
   }
 
   ApplySettings(outputPrefs);
 
   mCurrentPrefs = outputPrefs;
 
@@ -497,19 +494,17 @@ nsresult MediaEngineWebRTCMicrophoneSour
   mTrackID = TRACK_NONE;
   mPrincipal = PRINCIPAL_HANDLE_NONE;
 
   // If empty, no callbacks to deliver data should be occuring
   MOZ_ASSERT(mState != kReleased, "Source not allocated");
   MOZ_ASSERT(mState != kStarted, "Source not stopped");
 
   mState = kReleased;
-  LOG(("Audio device %s deallocated",
-       NS_ConvertUTF16toUTF8(mDeviceName).get()));
-
+  LOG("Audio device %s deallocated", NS_ConvertUTF16toUTF8(mDeviceName).get());
   return NS_OK;
 }
 
 nsresult MediaEngineWebRTCMicrophoneSource::SetTrack(
     const RefPtr<const AllocationHandle>&,
     const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
     const PrincipalHandle& aPrincipal) {
   AssertIsOnOwningThread();
@@ -527,17 +522,17 @@ nsresult MediaEngineWebRTCMicrophoneSour
   mTrackID = aTrackID;
   mPrincipal = aPrincipal;
 
   AudioSegment* segment = new AudioSegment();
 
   aStream->AddAudioTrack(aTrackID, aStream->GraphRate(), segment,
                          SourceMediaStream::ADDTRACK_QUEUED);
 
-  LOG(("Stream %p registered for microphone capture", aStream.get()));
+  LOG("Stream %p registered for microphone capture", aStream.get());
   return NS_OK;
 }
 
 class StartStopMessage : public ControlMessage {
  public:
   enum StartStop { Start, Stop };
 
   StartStopMessage(AudioInputProcessing* aInputProcessing, StartStop aAction)
@@ -603,18 +598,17 @@ nsresult MediaEngineWebRTCMicrophoneSour
 
   return NS_OK;
 }
 
 nsresult MediaEngineWebRTCMicrophoneSource::Stop(
     const RefPtr<const AllocationHandle>&) {
   AssertIsOnOwningThread();
 
-  LOG(("Mic source %p Stop()", this));
-
+  LOG("Mic source %p Stop()", this);
   MOZ_ASSERT(mStream, "SetTrack must have been called before ::Stop");
 
   if (mState == kStopped) {
     // Already stopped - this is allowed
     return NS_OK;
   }
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
@@ -720,64 +714,60 @@ void AudioInputProcessing::UpdateAECSett
     EchoCancellation::SuppressionLevel aLevel) {
   if (aUseAecMobile) {
     HANDLE_APM_ERROR(mAudioProcessing->echo_control_mobile()->Enable(aEnable));
     HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->Enable(false));
   } else {
     if (aLevel != EchoCancellation::SuppressionLevel::kLowSuppression &&
         aLevel != EchoCancellation::SuppressionLevel::kModerateSuppression &&
         aLevel != EchoCancellation::SuppressionLevel::kHighSuppression) {
-      MOZ_LOG(GetMediaManagerLog(), LogLevel::Error,
-              ("Attempt to set invalid AEC suppression level %d",
-               static_cast<int>(aLevel)));
+      LOG_ERROR("Attempt to set invalid AEC suppression level %d",
+                static_cast<int>(aLevel));
 
       aLevel = EchoCancellation::SuppressionLevel::kModerateSuppression;
     }
 
     HANDLE_APM_ERROR(mAudioProcessing->echo_control_mobile()->Enable(false));
     HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->Enable(aEnable));
     HANDLE_APM_ERROR(
         mAudioProcessing->echo_cancellation()->set_suppression_level(aLevel));
   }
 }
 
 void AudioInputProcessing::UpdateAGCSettings(bool aEnable,
                                              GainControl::Mode aMode) {
   if (aMode != GainControl::Mode::kAdaptiveAnalog &&
       aMode != GainControl::Mode::kAdaptiveDigital &&
       aMode != GainControl::Mode::kFixedDigital) {
-    MOZ_LOG(GetMediaManagerLog(), LogLevel::Error,
-            ("Attempt to set invalid AGC mode %d", static_cast<int>(aMode)));
+    LOG_ERROR("Attempt to set invalid AGC mode %d", static_cast<int>(aMode));
 
     aMode = GainControl::Mode::kAdaptiveDigital;
   }
 
 #if defined(WEBRTC_IOS) || defined(ATA) || defined(WEBRTC_ANDROID)
   if (aMode == GainControl::Mode::kAdaptiveAnalog) {
-    MOZ_LOG(GetMediaManagerLog(), LogLevel::Error,
-            ("Invalid AGC mode kAgcAdaptiveAnalog on mobile"));
+    LOG_ERROR("Invalid AGC mode kAgcAdaptiveAnalog on mobile");
     MOZ_ASSERT_UNREACHABLE(
         "Bad pref set in all.js or in about:config"
         " for the auto gain, on mobile.");
     aMode = GainControl::Mode::kFixedDigital;
   }
 #endif
   HANDLE_APM_ERROR(mAudioProcessing->gain_control()->set_mode(aMode));
   HANDLE_APM_ERROR(mAudioProcessing->gain_control()->Enable(aEnable));
 }
 
 void AudioInputProcessing::UpdateNSSettings(
     bool aEnable, webrtc::NoiseSuppression::Level aLevel) {
   if (aLevel != NoiseSuppression::Level::kLow &&
       aLevel != NoiseSuppression::Level::kModerate &&
       aLevel != NoiseSuppression::Level::kHigh &&
       aLevel != NoiseSuppression::Level::kVeryHigh) {
-    MOZ_LOG(GetMediaManagerLog(), LogLevel::Error,
-            ("Attempt to set invalid noise suppression level %d",
-             static_cast<int>(aLevel)));
+    LOG_ERROR("Attempt to set invalid noise suppression level %d",
+              static_cast<int>(aLevel));
 
     aLevel = NoiseSuppression::Level::kModerate;
   }
 
   HANDLE_APM_ERROR(mAudioProcessing->noise_suppression()->set_level(aLevel));
   HANDLE_APM_ERROR(mAudioProcessing->noise_suppression()->Enable(aEnable));
 }
 
@@ -830,17 +820,17 @@ void AudioInputProcessing::Pull(const Re
     if (!PassThrough(aStream->GraphImpl()) && mPacketizerInput) {
       // Processing is active and is processed in chunks of 10ms through the
       // input packetizer. We allow for 10ms of silence on the track to
       // accomodate the buffering worst-case.
       delta += mPacketizerInput->PacketSize();
     }
   }
 
-  LOG_FRAMES(("Pulling %" PRId64 " frames of silence.", delta));
+  LOG_FRAME("Pulling %" PRId64 " frames of silence.", delta);
 
   // This assertion fails when we append silence here in the same iteration
   // as there were real audio samples already appended by the audio callback.
   // Note that this is exempted until live samples and a subsequent chunk of
   // silence have been appended to the track. This will cover cases like:
   // - After Start(), there is silence (maybe multiple times) appended before
   //   the first audio callback.
   // - After Start(), there is real data (maybe multiple times) appended
@@ -1029,18 +1019,18 @@ void AudioInputProcessing::PacketizeAndP
     if (!mStream->GraphImpl()) {
       // The DOMMediaStream that owns mStream has been cleaned up
       // and MediaStream::DestroyImpl() has run in the MSG. This is fine and
       // can happen before the MediaManager thread gets to stop capture for
       // this MediaStream.
       continue;
     }
 
-    LOG_FRAMES(("Appending %" PRIu32 " frames of packetized audio",
-                mPacketizerInput->PacketSize()));
+    LOG_FRAME("Appending %" PRIu32 " frames of packetized audio",
+              mPacketizerInput->PacketSize());
 
 #ifdef DEBUG
     mLastCallbackAppendTime = mStream->GraphImpl()->IterationEnd();
 #endif
     mLiveFramesAppended = true;
 
     // We already have planar audio data of the right format. Insert into the
     // MSG.
@@ -1088,17 +1078,17 @@ void AudioInputProcessing::InsertInGraph
       channels[i] = write_channels[i] = samples + offset;
       offset += aFrames;
     }
 
     DeinterleaveAndConvertBuffer(aBuffer, aFrames, aChannels,
                                  write_channels.Elements());
   }
 
-  LOG_FRAMES(("Appending %zu frames of raw audio", aFrames));
+  LOG_FRAME("Appending %zu frames of raw audio", aFrames);
 
   MOZ_ASSERT(aChannels == channels.Length());
   segment.AppendFrames(buffer.forget(), channels, aFrames, mPrincipal);
 
   mStream->AppendToTrack(mTrackID, &segment);
 }
 
 // Called back on GraphDriver thread!
--- a/dom/media/webrtc/MediaTrackConstraints.cpp
+++ b/dom/media/webrtc/MediaTrackConstraints.cpp
@@ -9,20 +9,18 @@
 #include <algorithm>
 #include <iterator>
 
 #include "MediaEngineSource.h"
 #include "nsIScriptError.h"
 #include "mozilla/dom/MediaStreamTrackBinding.h"
 #include "mozilla/MediaManager.h"
 
-mozilla::LogModule* GetMediaManagerLog();
-#undef LOG
-#define LOG(msg, ...) \
-  MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, (msg, ##__VA_ARGS__))
+extern mozilla::LazyLogModule gMediaManagerLog;
+#define LOG(...) MOZ_LOG(gMediaManagerLog, LogLevel::Debug, (__VA_ARGS__))
 
 namespace mozilla {
 
 using dom::ConstrainBooleanParameters;
 
 template <class ValueType>
 template <class ConstrainRange>
 void NormalizedConstraintSet::Range<ValueType>::SetFrom(