Bug 1332598 - Make per-frame logs in VP8TrackEncoder verbose. r=jesup
author Andreas Pehrson <pehrsons@gmail.com>
Wed, 18 Jan 2017 15:01:47 +0100
changeset 377784 63dcf0258009ca28d9a14d624a629c4b674e2ab7
parent 377783 f5a4d8f2a6e4f8274fa0ad6979d56b2cfe919c1b
child 377785 123099d23df567ffb963b9fc5d56b9fa6bd1098a
push id 1419
push user jlund@mozilla.com
push date Mon, 10 Apr 2017 20:44:07 +0000
treeherder mozilla-release@5e6801b73ef6
reviewers jesup
bugs 1332598
milestone 53.0a1
Bug 1332598 - Make per-frame logs in VP8TrackEncoder verbose. r=jesup

MozReview-Commit-ID: HzpUlWjnAEW
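
As a quick illustration (not part of the patch itself, and assuming only the MOZ_LOG machinery from mozilla/Logging.h that the diff below already relies on), the reworked macro takes the log level as an explicit first argument, so chatty per-frame messages can be tagged Verbose while rarer events stay at Debug, Warning or Error. Example() and the literal format string are stand-ins for the real call sites:

    #include "mozilla/Logging.h"  // MOZ_LOG, LazyLogModule, LogLevel

    mozilla::LazyLogModule gVP8TrackEncoderLog("VP8TrackEncoder");

    #define VP8LOG(level, msg, ...) MOZ_LOG(gVP8TrackEncoderLog, \
                                            level, \
                                            (msg, ##__VA_ARGS__))

    void Example() {
      using mozilla::LogLevel;
      // Per-frame, high-volume message: only emitted when the module is
      // enabled at Verbose, e.g. MOZ_LOG=VP8TrackEncoder:5.
      VP8LOG(LogLevel::Verbose, "Converted an %s frame to I420", "NV12");
      // Rare, noteworthy condition: already visible at MOZ_LOG=VP8TrackEncoder:2.
      VP8LOG(LogLevel::Warning, "MediaRecorder lagging behind. Skipping a frame.");
    }
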
dom/media/encoder/VP8TrackEncoder.cpp
--- a/dom/media/encoder/VP8TrackEncoder.cpp
+++ b/dom/media/encoder/VP8TrackEncoder.cpp
@@ -14,19 +14,19 @@
 #include "vpx/vp8cx.h"
 #include "vpx/vpx_encoder.h"
 #include "WebMWriter.h"
 #include "mozilla/media/MediaUtils.h"
 
 namespace mozilla {
 
 LazyLogModule gVP8TrackEncoderLog("VP8TrackEncoder");
-#define VP8LOG(msg, ...) MOZ_LOG(gVP8TrackEncoderLog, mozilla::LogLevel::Debug, \
-                                  (msg, ##__VA_ARGS__))
-// Debug logging macro with object pointer and class name.
+#define VP8LOG(level, msg, ...) MOZ_LOG(gVP8TrackEncoderLog, \
+                                        level, \
+                                        (msg, ##__VA_ARGS__))
 
 #define DEFAULT_BITRATE_BPS 2500000
 
 using namespace mozilla::gfx;
 using namespace mozilla::layers;
 using namespace mozilla::media;
 
 static already_AddRefed<SourceSurface>
@@ -224,19 +224,20 @@ VP8TrackEncoder::GetEncodedPartitions(En
     if (timestamp.isValid()) {
       videoData->SetTimeStamp((uint64_t)timestamp.value());
     }
     CheckedInt64 duration = FramesToUsecs(pkt->data.frame.duration, mTrackRate);
     if (duration.isValid()) {
       videoData->SetDuration((uint64_t)duration.value());
     }
     videoData->SwapInFrameData(frameData);
-    VP8LOG("GetEncodedPartitions TimeStamp %lld Duration %lld\n",
-           videoData->GetTimeStamp(), videoData->GetDuration());
-    VP8LOG("frameType %d\n", videoData->GetFrameType());
+    VP8LOG(LogLevel::Verbose,
+           "GetEncodedPartitions TimeStamp %lld, Duration %lld, FrameType %d",
+           videoData->GetTimeStamp(), videoData->GetDuration(),
+           videoData->GetFrameType());
     aData.AppendEncodedFrame(videoData);
   }
 
   return !!pkt;
 }
 
 static bool isYUV420(const PlanarYCbCrImage::Data *aData)
 {
@@ -272,17 +273,18 @@ nsresult VP8TrackEncoder::PrepareRawFram
       MOZ_ASSERT(mMuteFrame);
     }
     img = mMuteFrame;
   } else {
     img = aChunk.mFrame.GetImage();
   }
 
   if (img->GetSize() != IntSize(mFrameWidth, mFrameHeight)) {
-    VP8LOG("Dynamic resolution changes (was %dx%d, now %dx%d) are unsupported\n",
+    VP8LOG(LogLevel::Error,
+           "Dynamic resolution changes (was %dx%d, now %dx%d) are unsupported",
            mFrameWidth, mFrameHeight, img->GetSize().width, img->GetSize().height);
     return NS_ERROR_FAILURE;
   }
 
   ImageFormat format = img->GetFormat();
   if (format == ImageFormat::PLANAR_YCBCR) {
     PlanarYCbCrImage* yuv = static_cast<PlanarYCbCrImage *>(img.get());
 
@@ -365,47 +367,47 @@ nsresult VP8TrackEncoder::PrepareRawFram
                               data->mCbChannel, data->mCbCrStride,
                               data->mCrChannel, data->mCbCrStride,
                               y, mFrameWidth,
                               cb, halfWidth,
                               cr, halfWidth,
                               mFrameWidth, mFrameHeight);
       yuvFormat = "I422";
     } else {
-      VP8LOG("Unsupported planar format\n");
+      VP8LOG(LogLevel::Error, "Unsupported planar format");
       NS_ASSERTION(false, "Unsupported planar format");
       return NS_ERROR_NOT_IMPLEMENTED;
     }
 
     if (rv != 0) {
-      VP8LOG("Converting an %s frame to I420 failed\n", yuvFormat.c_str());
+      VP8LOG(LogLevel::Error, "Converting an %s frame to I420 failed", yuvFormat.c_str());
       return NS_ERROR_FAILURE;
     }
 
-    VP8LOG("Converted an %s frame to I420\n", yuvFormat.c_str());
+    VP8LOG(LogLevel::Verbose, "Converted an %s frame to I420", yuvFormat.c_str());
   } else {
     // Not YCbCr at all. Try to get access to the raw data and convert.
 
     RefPtr<SourceSurface> surf = GetSourceSurface(img.forget());
     if (!surf) {
-      VP8LOG("Getting surface from %s image failed\n", Stringify(format).c_str());
+      VP8LOG(LogLevel::Error, "Getting surface from %s image failed", Stringify(format).c_str());
       return NS_ERROR_FAILURE;
     }
 
     RefPtr<DataSourceSurface> data = surf->GetDataSurface();
     if (!data) {
-      VP8LOG("Getting data surface from %s image with %s (%s) surface failed\n",
+      VP8LOG(LogLevel::Error, "Getting data surface from %s image with %s (%s) surface failed",
              Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
              Stringify(surf->GetFormat()).c_str());
       return NS_ERROR_FAILURE;
     }
 
     DataSourceSurface::ScopedMap map(data, DataSourceSurface::READ);
     if (!map.IsMapped()) {
-      VP8LOG("Reading DataSourceSurface from %s image with %s (%s) surface failed\n",
+      VP8LOG(LogLevel::Error, "Reading DataSourceSurface from %s image with %s (%s) surface failed",
              Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
              Stringify(surf->GetFormat()).c_str());
       return NS_ERROR_FAILURE;
     }
 
     int rv;
     switch (surf->GetFormat()) {
       case SurfaceFormat::B8G8R8A8:
@@ -421,30 +423,30 @@ nsresult VP8TrackEncoder::PrepareRawFram
         rv = libyuv::RGB565ToI420(static_cast<uint8*>(map.GetData()),
                                   map.GetStride(),
                                   y, mFrameWidth,
                                   cb, halfWidth,
                                   cr, halfWidth,
                                   mFrameWidth, mFrameHeight);
         break;
       default:
-        VP8LOG("Unsupported SourceSurface format %s\n",
+        VP8LOG(LogLevel::Error, "Unsupported SourceSurface format %s",
                Stringify(surf->GetFormat()).c_str());
         NS_ASSERTION(false, "Unsupported SourceSurface format");
         return NS_ERROR_NOT_IMPLEMENTED;
     }
 
     if (rv != 0) {
-      VP8LOG("%s to I420 conversion failed\n",
+      VP8LOG(LogLevel::Error, "%s to I420 conversion failed",
              Stringify(surf->GetFormat()).c_str());
       return NS_ERROR_FAILURE;
     }
 
-    VP8LOG("Converted a %s frame to I420\n",
-           Stringify(surf->GetFormat()).c_str());
+    VP8LOG(LogLevel::Verbose, "Converted a %s frame to I420",
+             Stringify(surf->GetFormat()).c_str());
   }
 
   mVPXImageWrapper->planes[VPX_PLANE_Y] = y;
   mVPXImageWrapper->planes[VPX_PLANE_U] = cb;
   mVPXImageWrapper->planes[VPX_PLANE_V] = cr;
   mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
   mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
   mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
@@ -522,39 +524,42 @@ VP8TrackEncoder::GetEncodedTrack(Encoded
 
   StreamTime totalProcessedDuration = 0;
   TimeStamp timebase = TimeStamp::Now();
   EncodeOperation nextEncodeOperation = ENCODE_NORMAL_FRAME;
 
   for (VideoSegment::ChunkIterator iter(mSourceSegment);
        !iter.IsEnded(); iter.Next()) {
     VideoChunk &chunk = *iter;
-    VP8LOG("nextEncodeOperation is %d for frame of duration %lld\n",
-           nextEncodeOperation, chunk.GetDuration());
+    VP8LOG(LogLevel::Verbose, "nextEncodeOperation is %d for frame of duration %lld",
+             nextEncodeOperation, chunk.GetDuration());
 
     // Encode frame.
     if (nextEncodeOperation != SKIP_FRAME) {
       nsresult rv = PrepareRawFrame(chunk);
       NS_ENSURE_SUCCESS(rv, NS_ERROR_FAILURE);
 
       // Encode the data with VP8 encoder
-      int flags = (nextEncodeOperation == ENCODE_NORMAL_FRAME) ?
-                  0 : VPX_EFLAG_FORCE_KF;
+      int flags = 0;
+      if (nextEncodeOperation == ENCODE_I_FRAME) {
+        VP8LOG(LogLevel::Warning, "MediaRecorder lagging behind. Encoding keyframe.");
+        flags |= VPX_EFLAG_FORCE_KF;
+      }
       if (vpx_codec_encode(mVPXContext, mVPXImageWrapper, mEncodedTimestamp,
                            (unsigned long)chunk.GetDuration(), flags,
                            VPX_DL_REALTIME)) {
         return NS_ERROR_FAILURE;
       }
       // Get the encoded data from VP8 encoder.
       GetEncodedPartitions(aData);
     } else {
       // SKIP_FRAME
       // Extend the duration of the last encoded data in aData
      // because this frame will be skipped.
-      NS_WARNING("MediaRecorder lagging behind. Skipping a frame.");
+      VP8LOG(LogLevel::Warning, "MediaRecorder lagging behind. Skipping a frame.");
       RefPtr<EncodedFrame> last = aData.GetEncodedFrames().LastElement();
       if (last) {
         last->SetDuration(last->GetDuration() + chunk.GetDuration());
       }
     }
 
     // Move forward the mEncodedTimestamp.
     mEncodedTimestamp += chunk.GetDuration();
@@ -566,17 +571,17 @@ VP8TrackEncoder::GetEncodedTrack(Encoded
                                                  totalProcessedDuration);
   }
 
   // Remove the chunks we have processed.
   mSourceSegment.Clear();
 
   // End of stream, pull the rest frames in encoder.
   if (EOS) {
-    VP8LOG("mEndOfStream is true\n");
+    VP8LOG(LogLevel::Debug, "mEndOfStream is true");
     mEncodingComplete = true;
     // Bug 1243611, keep calling vpx_codec_encode and vpx_codec_get_cx_data
     // until vpx_codec_get_cx_data return null.
 
     do {
       if (vpx_codec_encode(mVPXContext, nullptr, mEncodedTimestamp,
                            0, 0, VPX_DL_REALTIME)) {
         return NS_ERROR_FAILURE;