Bug 870002: move data-processing debugs in MSG to level 5 to allow granular logging r=roc
author: Randell Jesup <rjesup@jesup.org>
Thu, 09 May 2013 02:05:03 -0400
changeset 138120 f011e4187ec52b3bccbd98e48b84728b6da4191f
parent 138119 af29f5b97e59e1f1daa3dc0eb794ee21530d2cc6
child 138121 38282df9d4f019d18c4034dad839374adc33bc8f
push id: 3752
push user: lsblakk@mozilla.com
push date: Mon, 13 May 2013 17:21:10 +0000
treeherder: mozilla-aurora@1580544aef0b
reviewers: roc
bugs: 870002
milestone: 23.0a1
Bug 870002: move data-processing debugs in MSG to level 5 to allow granular logging r=roc
content/media/MediaStreamGraph.cpp
content/media/TrackUnionStream.h
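
For context, a minimal sketch of the NSPR logging pattern this patch relies on. The module name, accessor, and messages below are illustrative, not copied from the tree; the real LOG macro lives in MediaStreamGraph.cpp. The point is that PR_LOG_DEBUG is level 4 in PRLogModuleLevel, so PR_LOG_DEBUG+1 is level 5, and PR_LOG only prints a message when the module's configured level is at least the message level:

  // Sketch only: names are hypothetical, the pattern matches common Gecko usage.
  #include "prlog.h"

  #ifdef PR_LOGGING
  static PRLogModuleInfo* GetExampleGraphLog()
  {
    // Lazily create the log module the first time it is needed.
    static PRLogModuleInfo* sLog = nullptr;
    if (!sLog) {
      sLog = PR_NewLogModule("ExampleGraph");
    }
    return sLog;
  }
  #define LOG(level, msg) PR_LOG(GetExampleGraphLog(), level, msg)
  #else
  #define LOG(level, msg)
  #endif

  void Example()
  {
    // PR_LOG_DEBUG == 4, so PR_LOG_DEBUG+1 == 5.
    // With NSPR_LOG_MODULES=ExampleGraph:4 only the first line prints;
    // with NSPR_LOG_MODULES=ExampleGraph:5 both print.
    LOG(PR_LOG_DEBUG,   ("state-change message"));
    LOG(PR_LOG_DEBUG+1, ("per-iteration data-processing message"));
  }

In other words, after this patch the chatty per-iteration data-processing messages in the MediaStreamGraph require level 5 in NSPR_LOG_MODULES (using whatever module name MediaStreamGraph.cpp actually registers), while level 4 keeps only the less frequent state and error logging.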
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -114,19 +114,19 @@ MediaStreamGraphImpl::ExtractPendingInpu
     if (aStream->mPullEnabled && !aStream->mFinished &&
         !aStream->mListeners.IsEmpty()) {
       // Compute how much stream time we'll need assuming we don't block
       // the stream at all between mBlockingDecisionsMadeUntilTime and
       // aDesiredUpToTime.
       StreamTime t =
         GraphTimeToStreamTime(aStream, mStateComputedTime) +
         (aDesiredUpToTime - mStateComputedTime);
-      LOG(PR_LOG_DEBUG, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
-                         MediaTimeToSeconds(t),
-                         MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
+      LOG(PR_LOG_DEBUG+1, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
+                           MediaTimeToSeconds(t),
+                           MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
       if (t > aStream->mBuffer.GetEnd()) {
         *aEnsureNextIteration = true;
 #ifdef DEBUG
         if (aStream->mListeners.Length() == 0) {
           LOG(PR_LOG_ERROR, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
                              aStream, MediaTimeToSeconds(t),
                              MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
           aStream->DumpTrackInfo();
@@ -158,20 +158,20 @@ MediaStreamGraphImpl::ExtractPendingInpu
                            int64_t(segment->GetDuration())));
         aStream->mBuffer.AddTrack(data->mID, data->mRate, data->mStart, segment);
         // The track has taken ownership of data->mData, so let's replace
         // data->mData with an empty clone.
         data->mData = segment->CreateEmptyClone();
         data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
       } else if (data->mData->GetDuration() > 0) {
         MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
-        LOG(PR_LOG_DEBUG, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
-                           aStream, data->mID,
-                           int64_t(dest->GetDuration()),
-                           int64_t(dest->GetDuration() + data->mData->GetDuration())));
+        LOG(PR_LOG_DEBUG+1, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
+                             aStream, data->mID,
+                             int64_t(dest->GetDuration()),
+                             int64_t(dest->GetDuration() + data->mData->GetDuration())));
         dest->AppendFrom(data->mData);
       }
       if (data->mCommands & SourceMediaStream::TRACK_END) {
         aStream->mBuffer.FindTrack(data->mID)->SetEnded();
         aStream->mUpdateTracks.RemoveElementAt(i);
       }
     }
     aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
@@ -317,20 +317,20 @@ MediaStreamGraphImpl::UpdateCurrentTime(
   GraphTime nextCurrentTime =
     SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds()) + mCurrentTime;
   if (mStateComputedTime < nextCurrentTime) {
     LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
     nextCurrentTime = mStateComputedTime;
   }
   mCurrentTimeStamp = now;
 
-  LOG(PR_LOG_DEBUG, ("Updating current time to %f (real %f, mStateComputedTime %f)",
-                     MediaTimeToSeconds(nextCurrentTime),
-                     (now - mInitialTimeStamp).ToSeconds(),
-                     MediaTimeToSeconds(mStateComputedTime)));
+  LOG(PR_LOG_DEBUG+1, ("Updating current time to %f (real %f, mStateComputedTime %f)",
+                       MediaTimeToSeconds(nextCurrentTime),
+                       (now - mInitialTimeStamp).ToSeconds(),
+                       MediaTimeToSeconds(mStateComputedTime)));
 
   if (prevCurrentTime >= nextCurrentTime) {
     NS_ASSERTION(prevCurrentTime == nextCurrentTime, "Time can't go backwards!");
     // This could happen due to low clock resolution, maybe?
     LOG(PR_LOG_DEBUG, ("Time did not advance"));
     // There's not much left to do here, but the code below that notifies
     // listeners that streams have ended still needs to run.
   }
@@ -375,19 +375,19 @@ MediaStreamGraphImpl::UpdateCurrentTime(
       stream->mNotifiedFinished = true;
       stream->mLastPlayedVideoFrame.SetNull();
       for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
         MediaStreamListener* l = stream->mListeners[j];
         l->NotifyFinished(this);
       }
     }
 
-    LOG(PR_LOG_DEBUG, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
-                       stream, MediaTimeToSeconds(stream->mBufferStartTime),
-                       MediaTimeToSeconds(blockedTime)));
+    LOG(PR_LOG_DEBUG+1, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
+                         stream, MediaTimeToSeconds(stream->mBufferStartTime),
+                         MediaTimeToSeconds(blockedTime)));
   }
 
   mCurrentTime = nextCurrentTime;
 }
 
 bool
 MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream, GraphTime aTime,
                                    GraphTime aEndBlockingDecisions, GraphTime* aEnd)
@@ -408,30 +408,30 @@ MediaStreamGraphImpl::WillUnderrun(Media
                        aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(mCurrentTime),
                        bufferEnd, mCurrentTime, aStream->GetBufferEnd()));
     aStream->DumpTrackInfo();
     NS_ASSERTION(bufferEnd >= mCurrentTime, "Buffer underran");
   }
 #endif
   // We should block after bufferEnd.
   if (bufferEnd <= aTime) {
-    LOG(PR_LOG_DEBUG, ("MediaStream %p will block due to data underrun, "
-                       "bufferEnd %f",
-                       aStream, MediaTimeToSeconds(bufferEnd)));
+    LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to data underrun, "
+                         "bufferEnd %f",
+                         aStream, MediaTimeToSeconds(bufferEnd)));
     return true;
   }
   // We should keep blocking if we're currently blocked and we don't have
   // data all the way through to aEndBlockingDecisions. If we don't have
   // data all the way through to aEndBlockingDecisions, we'll block soon,
   // but we might as well remain unblocked and play the data we've got while
   // we can.
   if (bufferEnd <= aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
-    LOG(PR_LOG_DEBUG, ("MediaStream %p will block due to speculative data underrun, "
-                       "bufferEnd %f",
-                       aStream, MediaTimeToSeconds(bufferEnd)));
+    LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to speculative data underrun, "
+                         "bufferEnd %f",
+                         aStream, MediaTimeToSeconds(bufferEnd)));
     return true;
   }
   // Reconsider decisions at bufferEnd
   *aEnd = std::min(*aEnd, bufferEnd);
   return false;
 }
 
 void
@@ -514,18 +514,18 @@ MediaStreamGraphImpl::UpdateStreamOrder(
   }
 }
 
 void
 MediaStreamGraphImpl::RecomputeBlocking(GraphTime aEndBlockingDecisions)
 {
   bool blockingDecisionsWillChange = false;
 
-  LOG(PR_LOG_DEBUG, ("Media graph %p computing blocking for time %f",
-                     this, MediaTimeToSeconds(mStateComputedTime)));
+  LOG(PR_LOG_DEBUG+1, ("Media graph %p computing blocking for time %f",
+                       this, MediaTimeToSeconds(mStateComputedTime)));
   for (uint32_t i = 0; i < mStreams.Length(); ++i) {
     MediaStream* stream = mStreams[i];
     if (!stream->mInBlockingSet) {
       // Compute a partition of the streams containing 'stream' such that we can
       // compute the blocking status of each subset independently.
       nsAutoTArray<MediaStream*,10> streamSet;
       AddBlockingRelatedStreamsToSet(&streamSet, stream);
 
@@ -541,19 +541,19 @@ MediaStreamGraphImpl::RecomputeBlocking(
     }
 
     GraphTime end;
     stream->mBlocked.GetAt(mCurrentTime, &end);
     if (end < GRAPH_TIME_MAX) {
       blockingDecisionsWillChange = true;
     }
   }
-  LOG(PR_LOG_DEBUG, ("Media graph %p computed blocking for interval %f to %f",
-                     this, MediaTimeToSeconds(mStateComputedTime),
-                     MediaTimeToSeconds(aEndBlockingDecisions)));
+  LOG(PR_LOG_DEBUG+1, ("Media graph %p computed blocking for interval %f to %f",
+                       this, MediaTimeToSeconds(mStateComputedTime),
+                       MediaTimeToSeconds(aEndBlockingDecisions)));
   mStateComputedTime = aEndBlockingDecisions;
  
   if (blockingDecisionsWillChange) {
     // Make sure we wake up to notify listeners about these changes.
     EnsureNextIteration();
   }
 }
 
@@ -617,34 +617,34 @@ MediaStreamGraphImpl::RecomputeBlockingA
   }
 
   for (uint32_t i = 0; i < aStreams.Length(); ++i) {
     MediaStream* stream = aStreams[i];
 
     if (stream->mFinished) {
       GraphTime endTime = StreamTimeToGraphTime(stream, stream->GetBufferEnd());
       if (endTime <= aTime) {
-        LOG(PR_LOG_DEBUG, ("MediaStream %p is blocked due to being finished", stream));
+        LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to being finished", stream));
         // We'll block indefinitely
         MarkStreamBlocking(stream);
         *aEnd = aEndBlockingDecisions;
         continue;
       } else {
-        LOG(PR_LOG_DEBUG, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
-                           stream, MediaTimeToSeconds(stream->GetBufferEnd()),
-                           MediaTimeToSeconds(endTime)));
+        LOG(PR_LOG_DEBUG+1, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
+                             stream, MediaTimeToSeconds(stream->GetBufferEnd()),
+                             MediaTimeToSeconds(endTime)));
         *aEnd = std::min(*aEnd, endTime);
       }
     }
 
     GraphTime end;
     bool explicitBlock = stream->mExplicitBlockerCount.GetAt(aTime, &end) > 0;
     *aEnd = std::min(*aEnd, end);
     if (explicitBlock) {
-      LOG(PR_LOG_DEBUG, ("MediaStream %p is blocked due to explicit blocker", stream));
+      LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to explicit blocker", stream));
       MarkStreamBlocking(stream);
       continue;
     }
 
     bool underrun = WillUnderrun(stream, aTime, aEndBlockingDecisions, aEnd);
     if (underrun) {
       // We'll block indefinitely
       MarkStreamBlocking(stream);
@@ -770,18 +770,18 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
         // more than one sample away from the ideal amount.
         TrackTicks startTicks =
             TimeToTicksRoundDown(track->GetRate(), audioOutput.mBlockedAudioTime);
         audioOutput.mBlockedAudioTime += end - t;
         TrackTicks endTicks =
             TimeToTicksRoundDown(track->GetRate(), audioOutput.mBlockedAudioTime);
 
         output.InsertNullDataAtStart(endTicks - startTicks);
-        LOG(PR_LOG_DEBUG, ("MediaStream %p writing blocking-silence samples for %f to %f",
-                         aStream, MediaTimeToSeconds(t), MediaTimeToSeconds(end)));
+        LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing blocking-silence samples for %f to %f",
+                             aStream, MediaTimeToSeconds(t), MediaTimeToSeconds(end)));
       } else {
         TrackTicks startTicks =
             track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, t));
         TrackTicks endTicks =
             track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, end));
 
         // If startTicks is before the track start, then that part of 'audio'
         // will just be silence, which is fine here. But if endTicks is after
@@ -791,19 +791,19 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
         if (sliceEnd > startTicks) {
           output.AppendSlice(*audio, startTicks, sliceEnd);
         }
         // Play silence where the track has ended
         output.AppendNullData(endTicks - sliceEnd);
         NS_ASSERTION(endTicks == sliceEnd || track->IsEnded(),
                      "Ran out of data but track not ended?");
         output.ApplyVolume(volume);
-        LOG(PR_LOG_DEBUG, ("MediaStream %p writing samples for %f to %f (samples %lld to %lld)",
-                           aStream, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
-                           startTicks, endTicks));
+        LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing samples for %f to %f (samples %lld to %lld)",
+                             aStream, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
+                             startTicks, endTicks));
       }
       output.WriteTo(audioOutput.mStream);
       t = end;
     }
   }
 }
 
 void
@@ -833,19 +833,19 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
       start = thisStart;
       frame = thisFrame;
       track = tracks.get();
     }
   }
   if (!frame || *frame == aStream->mLastPlayedVideoFrame)
     return;
 
-  LOG(PR_LOG_DEBUG, ("MediaStream %p writing video frame %p (%dx%d)",
-                     aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
-                     frame->GetIntrinsicSize().height));
+  LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing video frame %p (%dx%d)",
+                       aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
+                       frame->GetIntrinsicSize().height));
   GraphTime startTime = StreamTimeToGraphTime(aStream,
       track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
   TimeStamp targetTime = mCurrentTimeStamp +
       TimeDuration::FromMilliseconds(double(startTime - mCurrentTime));
   for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
     VideoFrameContainer* output = aStream->mVideoOutputs[i];
     output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(),
                             targetTime);
@@ -1059,27 +1059,27 @@ MediaStreamGraphImpl::RunThread()
         TimeStamp now = TimeStamp::Now();
         if (mNeedAnotherIteration) {
           int64_t timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS -
             int64_t((now - mCurrentTimeStamp).ToMilliseconds());
           // Make sure timeoutMS doesn't overflow 32 bits by waking up at
           // least once a minute, if we need to wake up at all
           timeoutMS = std::max<int64_t>(0, std::min<int64_t>(timeoutMS, 60*1000));
           timeout = PR_MillisecondsToInterval(uint32_t(timeoutMS));
-          LOG(PR_LOG_DEBUG, ("Waiting for next iteration; at %f, timeout=%f",
-                             (now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0));
+          LOG(PR_LOG_DEBUG+1, ("Waiting for next iteration; at %f, timeout=%f",
+                               (now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0));
           mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION;
         } else {
           mWaitState = WAITSTATE_WAITING_INDEFINITELY;
         }
         if (timeout > 0) {
           mMonitor.Wait(timeout);
-          LOG(PR_LOG_DEBUG, ("Resuming after timeout; at %f, elapsed=%f",
-                             (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
-                             (TimeStamp::Now() - now).ToSeconds()));
+          LOG(PR_LOG_DEBUG+1, ("Resuming after timeout; at %f, elapsed=%f",
+                               (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
+                               (TimeStamp::Now() - now).ToSeconds()));
         }
       }
       mWaitState = WAITSTATE_RUNNING;
       mNeedAnotherIteration = false;
       messageQueue.SwapElements(mMessageQueue);
     }
   }
 }
--- a/content/media/TrackUnionStream.h
+++ b/content/media/TrackUnionStream.h
@@ -215,32 +215,32 @@ protected:
           inputTrackEndPoint = aInputTrack->GetEnd();
           *aOutputTrackFinished = true;
         }
       }
 
       if (interval.mInputIsBlocked) {
         // Maybe the input track ended?
         segment->AppendNullData(ticks);
-        LOG(PR_LOG_DEBUG, ("TrackUnionStream %p appending %lld ticks of null data to track %d",
+        LOG(PR_LOG_DEBUG+1, ("TrackUnionStream %p appending %lld ticks of null data to track %d",
             this, (long long)ticks, outputTrack->GetID()));
       } else {
         // Figuring out which samples to use from the input stream is tricky
         // because its start time and our start time may differ by a fraction
         // of a tick. Assuming the input track hasn't ended, we have to ensure
         // that 'ticks' samples are gathered, even though a tick boundary may
         // occur between outputStart and outputEnd but not between inputStart
         // and inputEnd.
         // We'll take the latest samples we can.
         TrackTicks inputEndTicks = TimeToTicksRoundUp(rate, inputEnd);
         TrackTicks inputStartTicks = inputEndTicks - ticks;
         segment->AppendSlice(*aInputTrack->GetSegment(),
                              std::min(inputTrackEndPoint, inputStartTicks),
                              std::min(inputTrackEndPoint, inputEndTicks));
-        LOG(PR_LOG_DEBUG, ("TrackUnionStream %p appending %lld ticks of input data to track %d",
+        LOG(PR_LOG_DEBUG+1, ("TrackUnionStream %p appending %lld ticks of input data to track %d",
             this, (long long)(std::min(inputTrackEndPoint, inputEndTicks) - std::min(inputTrackEndPoint, inputStartTicks)),
             outputTrack->GetID()));
       }
       for (uint32_t j = 0; j < mListeners.Length(); ++j) {
         MediaStreamListener* l = mListeners[j];
         l->NotifyQueuedTrackChanges(Graph(), outputTrack->GetID(),
                                     outputTrack->GetRate(), startTicks, 0,
                                     *segment);