Bug 1423253 - Disregard VideoChunk durations in VideoTrackEncoder. r=padenot
authorAndreas Pehrson <apehrson@mozilla.com>
Fri, 22 Mar 2019 11:43:08 +0000
changeset 465641 2f7f881b7ede6ae637e76e3fa9636f1306058e52
parent 465640 9e6eddfb4ef87865a15258f35eaab0635d197b4e
child 465642 e2a57da64b18b277278e8ecc30108b9de82ef51d
push id: 35744
push user: apavel@mozilla.com
push date: Fri, 22 Mar 2019 16:44:08 +0000
treeherder: mozilla-central@e66a2b59914d [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: padenot
bugs: 1423253
milestone: 68.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1423253 - Disregard VideoChunk durations in VideoTrackEncoder. r=padenot Long-term we want to lift durations out of video altogether, and only use wall-clock timestamps. This patch achieves this in VideoTrackEncoder. When the MediaStreamGraph is audio-only, the equivalent for video will be completely duration-less. Until we have that, some pieces around the MSG will still need durations for track-bookkeeping reasons. This also integrates the DriftCompensator into VideoTrackEncoder, by compensating for drift when frames are moved from mIncomingBuffer to mOutgoingBuffer, i.e., when we recalculate time stamps into durations for the underlying encoder to use. Differential Revision: https://phabricator.services.mozilla.com/D22909
dom/media/MediaSegment.h
dom/media/MediaStreamGraph.cpp
dom/media/VideoSegment.cpp
dom/media/VideoSegment.h
dom/media/encoder/MediaEncoder.cpp
dom/media/encoder/MediaEncoder.h
dom/media/encoder/TrackEncoder.cpp
dom/media/encoder/TrackEncoder.h
dom/media/gtest/TestVideoTrackEncoder.cpp
--- a/dom/media/MediaSegment.h
+++ b/dom/media/MediaSegment.h
@@ -362,35 +362,16 @@ class MediaSegmentBase : public MediaSeg
     const Chunk& operator*() { return mSegment.mChunks[mIndex]; }
     const Chunk* operator->() { return &mSegment.mChunks[mIndex]; }
 
    private:
     const MediaSegmentBase<C, Chunk>& mSegment;
     uint32_t mIndex;
   };
 
-  Chunk* FindChunkContaining(StreamTime aOffset, StreamTime* aStart = nullptr) {
-    if (aOffset < 0) {
-      return nullptr;
-    }
-    StreamTime offset = 0;
-    for (uint32_t i = 0; i < mChunks.Length(); ++i) {
-      Chunk& c = mChunks[i];
-      StreamTime nextOffset = offset + c.GetDuration();
-      if (aOffset < nextOffset) {
-        if (aStart) {
-          *aStart = offset;
-        }
-        return &c;
-      }
-      offset = nextOffset;
-    }
-    return nullptr;
-  }
-
   void RemoveLeading(StreamTime aDuration) { RemoveLeading(aDuration, 0); }
 
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override {
     size_t amount = mChunks.ShallowSizeOfExcludingThis(aMallocSizeOf);
     for (size_t i = 0; i < mChunks.Length(); i++) {
       amount += mChunks[i].SizeOfExcludingThisIfUnshared(aMallocSizeOf);
     }
     return amount;
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -2558,17 +2558,17 @@ void SourceMediaStream::ExtractPendingIn
 
       segment->InsertNullDataAtStart(streamCurrentTime);
       data->mEndOfFlushedData += segment->GetDuration();
       mTracks.AddTrack(data->mID, streamCurrentTime, segment);
       // The track has taken ownership of data->mData, so let's replace
       // data->mData with an empty clone.
       data->mData = segment->CreateEmptyClone();
       data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
-    } else if (data->mData->GetDuration() > 0) {
+    } else {
       MediaSegment* dest = mTracks.FindTrack(data->mID)->GetSegment();
       LOG(LogLevel::Verbose,
           ("%p: SourceMediaStream %p track %d, advancing end from %" PRId64
            " to %" PRId64,
            GraphImpl(), this, data->mID, int64_t(dest->GetDuration()),
            int64_t(dest->GetDuration() + data->mData->GetDuration())));
       data->mEndOfFlushedData += data->mData->GetDuration();
       dest->AppendFrom(data->mData);
@@ -2769,31 +2769,63 @@ void SourceMediaStream::AddDirectTrackLi
   LOG(LogLevel::Debug,
       ("%p: Added direct track listener %p", GraphImpl(), listener.get()));
   listener->NotifyDirectListenerInstalled(
       DirectMediaStreamTrackListener::InstallationResult::SUCCESS);
 
   // Pass buffered data to the listener
   AudioSegment bufferedAudio;
   VideoSegment bufferedVideo;
+  if (isAudio) {
+    // For audio we append all ticks.
+    MediaSegment& trackSegment = *track->GetSegment();
+    if (mTracks.GetForgottenDuration() < trackSegment.GetDuration()) {
+      bufferedAudio.AppendSlice(trackSegment, mTracks.GetForgottenDuration(),
+                                trackSegment.GetDuration());
+    }
+
+    if (TrackData* updateData = FindDataForTrack(aTrackID)) {
+      bufferedAudio.AppendSlice(*updateData->mData, 0,
+                                updateData->mData->GetDuration());
+    }
+  } else {
+    // For video we append all non-null chunks, as we're only interested in
+    // real frames and their timestamps.
+    VideoSegment& trackSegment =
+        static_cast<VideoSegment&>(*track->GetSegment());
+    for (VideoSegment::ConstChunkIterator iter(trackSegment); !iter.IsEnded();
+         iter.Next()) {
+      if (iter->IsNull()) {
+        continue;
+      }
+      MOZ_ASSERT(!iter->mTimeStamp.IsNull());
+      bufferedVideo.AppendFrame(do_AddRef(iter->mFrame.GetImage()), 1,
+                                iter->mFrame.GetIntrinsicSize(),
+                                iter->mFrame.GetPrincipalHandle(),
+                                iter->mFrame.GetForceBlack(), iter->mTimeStamp);
+    }
+
+    if (TrackData* updateData = FindDataForTrack(aTrackID)) {
+      VideoSegment& video = static_cast<VideoSegment&>(*updateData->mData);
+      for (VideoSegment::ConstChunkIterator iter(video); !iter.IsEnded();
+           iter.Next()) {
+        if (iter->IsNull()) {
+          continue;
+        }
+        bufferedVideo.AppendFrame(
+            do_AddRef(iter->mFrame.GetImage()), 1,
+            iter->mFrame.GetIntrinsicSize(), iter->mFrame.GetPrincipalHandle(),
+            iter->mFrame.GetForceBlack(), iter->mTimeStamp);
+      }
+    }
+  }
+
   MediaSegment& bufferedData = isAudio
                                    ? static_cast<MediaSegment&>(bufferedAudio)
                                    : static_cast<MediaSegment&>(bufferedVideo);
-
-  MediaSegment& trackSegment = *track->GetSegment();
-  if (mTracks.GetForgottenDuration() < trackSegment.GetDuration()) {
-    bufferedData.AppendSlice(trackSegment, mTracks.GetForgottenDuration(),
-                             trackSegment.GetDuration());
-  }
-
-  if (TrackData* updateData = FindDataForTrack(aTrackID)) {
-    bufferedData.AppendSlice(*updateData->mData, 0,
-                             updateData->mData->GetDuration());
-  }
-
   if (bufferedData.GetDuration() != 0) {
     listener->NotifyRealtimeTrackData(Graph(), 0, bufferedData);
   }
 }
 
 void SourceMediaStream::RemoveDirectTrackListenerImpl(
     DirectMediaStreamTrackListener* aListener, TrackID aTrackID) {
   MutexAutoLock lock(mMutex);
--- a/dom/media/VideoSegment.cpp
+++ b/dom/media/VideoSegment.cpp
@@ -88,16 +88,17 @@ already_AddRefed<Image> VideoFrame::Crea
 void VideoSegment::AppendFrame(already_AddRefed<Image>&& aImage,
                                StreamTime aDuration,
                                const IntSize& aIntrinsicSize,
                                const PrincipalHandle& aPrincipalHandle,
                                bool aForceBlack, TimeStamp aTimeStamp) {
   VideoChunk* chunk = AppendChunk(aDuration);
   chunk->mTimeStamp = aTimeStamp;
   VideoFrame frame(aImage, aIntrinsicSize);
+  MOZ_ASSERT_IF(!IsNull(), !aTimeStamp.IsNull());
   frame.SetForceBlack(aForceBlack);
   frame.SetPrincipalHandle(aPrincipalHandle);
   chunk->mFrame.TakeFrom(&frame);
 }
 
 VideoSegment::VideoSegment()
     : MediaSegmentBase<VideoSegment, VideoChunk>(VIDEO) {}
 
--- a/dom/media/VideoSegment.h
+++ b/dom/media/VideoSegment.h
@@ -129,31 +129,31 @@ class VideoSegment : public MediaSegment
     if (!c) {
       return nullptr;
     }
     if (aStart) {
       *aStart = mDuration - c->mDuration;
     }
     return &c->mFrame;
   }
-  VideoChunk* FindChunkContainingTime(const TimeStamp& aTime) {
+  VideoChunk* FindChunkContaining(const TimeStamp& aTime) {
     VideoChunk* previousChunk = nullptr;
     for (VideoChunk& c : mChunks) {
       if (c.mTimeStamp.IsNull()) {
         continue;
       }
       if (c.mTimeStamp > aTime) {
         return previousChunk;
       }
       previousChunk = &c;
     }
     return previousChunk;
   }
   void ForgetUpToTime(const TimeStamp& aTime) {
-    VideoChunk* chunk = FindChunkContainingTime(aTime);
+    VideoChunk* chunk = FindChunkContaining(aTime);
     if (!chunk) {
       return;
     }
     StreamTime duration = 0;
     size_t chunksToRemove = 0;
     for (const VideoChunk& c : mChunks) {
       if (c.mTimeStamp >= chunk->mTimeStamp) {
         break;
--- a/dom/media/encoder/MediaEncoder.cpp
+++ b/dom/media/encoder/MediaEncoder.cpp
@@ -197,55 +197,31 @@ class MediaEncoder::VideoTrackListener :
     TRACE_COMMENT("Encoder %p", mEncoder.get());
     MOZ_ASSERT(mEncoder);
     MOZ_ASSERT(mEncoderThread);
 
     if (mShutdown) {
       return;
     }
 
+    const TimeStamp now = TimeStamp::Now();
     if (!mInitialized) {
-      nsresult rv = mEncoderThread->Dispatch(NewRunnableMethod<StreamTime>(
+      nsresult rv = mEncoderThread->Dispatch(NewRunnableMethod<TimeStamp>(
           "mozilla::VideoTrackEncoder::SetStartOffset", mEncoder,
-          &VideoTrackEncoder::SetStartOffset, aTrackOffset));
+          &VideoTrackEncoder::SetStartOffset, now));
       MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
       Unused << rv;
       mInitialized = true;
     }
 
-    AutoTArray<Pair<bool, StreamTime>, 2> nulledSequence;
-    for (VideoSegment::ConstChunkIterator iter(
-             static_cast<const VideoSegment&>(aQueuedMedia));
-         !iter.IsEnded(); iter.Next()) {
-      if (!nulledSequence.IsEmpty()) {
-        Pair<bool, StreamTime>& last = nulledSequence.LastElement();
-        if (last.first() == iter->IsNull()) {
-          last.second() += iter->GetDuration();
-          continue;
-        }
-      }
-      nulledSequence.AppendElement(
-          MakePair(iter->IsNull(), iter->GetDuration()));
-    }
-
-    for (const Pair<bool, StreamTime>& nulledRange : nulledSequence) {
-      if (nulledRange.first()) {
-        nsresult rv = mEncoderThread->Dispatch(NewRunnableMethod<StreamTime>(
-            "mozilla::VideoTrackEncoder::AdvanceBlockedInput", mEncoder,
-            &VideoTrackEncoder::AdvanceBlockedInput, nulledRange.second()));
-        MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
-        Unused << rv;
-      } else {
-        nsresult rv = mEncoderThread->Dispatch(NewRunnableMethod<StreamTime>(
-            "mozilla::VideoTrackEncoder::AdvanceCurrentTime", mEncoder,
-            &VideoTrackEncoder::AdvanceCurrentTime, nulledRange.second()));
-        MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
-        Unused << rv;
-      }
-    }
+    nsresult rv = mEncoderThread->Dispatch(NewRunnableMethod<TimeStamp>(
+        "mozilla::VideoTrackEncoder::AdvanceCurrentTime", mEncoder,
+        &VideoTrackEncoder::AdvanceCurrentTime, now));
+    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+    Unused << rv;
   }
 
   void NotifyRealtimeTrackData(MediaStreamGraph* aGraph,
                                StreamTime aTrackOffset,
                                const MediaSegment& aMedia) override {
     TRACE_COMMENT("Encoder %p", mEncoder.get());
     MOZ_ASSERT(mEncoder);
     MOZ_ASSERT(mEncoderThread);
--- a/dom/media/encoder/MediaEncoder.h
+++ b/dom/media/encoder/MediaEncoder.h
@@ -24,16 +24,17 @@ class TaskQueue;
 
 namespace dom {
 class AudioNode;
 class AudioStreamTrack;
 class MediaStreamTrack;
 class VideoStreamTrack;
 }  // namespace dom
 
+class DriftCompensator;
 class MediaEncoder;
 
 class MediaEncoderListener {
  public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaEncoderListener)
   virtual void Initialized() = 0;
   virtual void DataAvailable() = 0;
   virtual void Error() = 0;
@@ -253,16 +254,17 @@ class MediaEncoder {
   void SetError();
 
   // Get encoded data from trackEncoder and write to muxer
   nsresult WriteEncodedDataToMuxer(TrackEncoder* aTrackEncoder);
   // Get metadata from trackEncoder and copy to muxer
   nsresult CopyMetadataToMuxer(TrackEncoder* aTrackEncoder);
 
   const RefPtr<TaskQueue> mEncoderThread;
+  const RefPtr<DriftCompensator> mDriftCompensator;
 
   UniquePtr<ContainerWriter> mWriter;
   RefPtr<AudioTrackEncoder> mAudioEncoder;
   RefPtr<AudioTrackListener> mAudioListener;
   RefPtr<VideoTrackEncoder> mVideoEncoder;
   RefPtr<VideoTrackListener> mVideoListener;
   RefPtr<EncoderListener> mEncoderListener;
   nsTArray<RefPtr<MediaEncoderListener>> mListeners;
--- a/dom/media/encoder/TrackEncoder.cpp
+++ b/dom/media/encoder/TrackEncoder.cpp
@@ -33,17 +33,16 @@ static const int DEFAULT_KEYFRAME_INTERV
 
 TrackEncoder::TrackEncoder(TrackRate aTrackRate)
     : mEncodingComplete(false),
       mEosSetInEncoder(false),
       mInitialized(false),
       mEndOfStream(false),
       mCanceled(false),
       mInitCounter(0),
-      mNotInitDuration(0),
       mSuspended(false),
       mTrackRate(aTrackRate) {}
 
 bool TrackEncoder::IsInitialized() {
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
   return mInitialized;
 }
 
@@ -310,60 +309,65 @@ VideoTrackEncoder::VideoTrackEncoder(Ref
       mDisplayHeight(0),
       mEncodedTicks(0),
       mVideoBitrate(0),
       mFrameDroppingMode(aFrameDroppingMode),
       mKeyFrameInterval(DEFAULT_KEYFRAME_INTERVAL_MS) {
   mLastChunk.mDuration = 0;
 }
 
-void VideoTrackEncoder::Suspend(TimeStamp aTime) {
+void VideoTrackEncoder::Suspend(const TimeStamp& aTime) {
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
-  TRACK_LOG(LogLevel::Info, ("[VideoTrackEncoder %p]: Suspend(), was %s", this,
-                             mSuspended ? "suspended" : "live"));
+  TRACK_LOG(LogLevel::Info,
+            ("[VideoTrackEncoder %p]: Suspend() at %.3fs, was %s", this,
+             mStartTime.IsNull() ? 0.0 : (aTime - mStartTime).ToSeconds(),
+             mSuspended ? "suspended" : "live"));
 
   if (mSuspended) {
     return;
   }
 
   mSuspended = true;
   mSuspendTime = aTime;
 }
 
-void VideoTrackEncoder::Resume(TimeStamp aTime) {
+void VideoTrackEncoder::Resume(const TimeStamp& aTime) {
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
-  TRACK_LOG(LogLevel::Info, ("[VideoTrackEncoder %p]: Resume(), was %s", this,
-                             mSuspended ? "suspended" : "live"));
 
   if (!mSuspended) {
     return;
   }
 
+  TRACK_LOG(
+      LogLevel::Info,
+      ("[VideoTrackEncoder %p]: Resume() after %.3fs, was %s", this,
+       (aTime - mSuspendTime).ToSeconds(), mSuspended ? "suspended" : "live"));
+
   mSuspended = false;
 
   TimeDuration suspendDuration = aTime - mSuspendTime;
   if (!mLastChunk.mTimeStamp.IsNull()) {
-    VideoChunk* nextChunk = mIncomingBuffer.FindChunkContaining(mCurrentTime);
-    if (nextChunk && nextChunk->mTimeStamp < aTime) {
+    VideoChunk* nextChunk = mIncomingBuffer.FindChunkContaining(aTime);
+    MOZ_ASSERT_IF(nextChunk, nextChunk->mTimeStamp <= aTime);
+    if (nextChunk) {
       nextChunk->mTimeStamp = aTime;
     }
     mLastChunk.mTimeStamp += suspendDuration;
   }
   if (!mStartTime.IsNull()) {
     mStartTime += suspendDuration;
   }
 
   mSuspendTime = TimeStamp();
 }
 
 void VideoTrackEncoder::AppendVideoSegment(VideoSegment&& aSegment) {
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
   TRACK_LOG(LogLevel::Verbose,
-            ("[VideoTrackEncoder %p]: AppendVideoSegment() duration=%" PRIu64,
-             this, aSegment.GetDuration()));
+            ("[VideoTrackEncoder %p]: AppendVideoSegment()", this));
 
   if (mCanceled) {
     return;
   }
 
   if (mEndOfStream) {
     return;
   }
@@ -378,17 +382,17 @@ void VideoTrackEncoder::TakeTrackData(Vi
     return;
   }
 
   aSegment.AppendFrom(&mOutgoingBuffer);
   mOutgoingBuffer.Clear();
 }
 
 void VideoTrackEncoder::Init(const VideoSegment& aSegment,
-                             StreamTime aDuration) {
+                             const TimeStamp& aTime) {
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
 
   if (mInitialized) {
     return;
   }
 
   mInitCounter++;
   TRACK_LOG(LogLevel::Debug,
@@ -414,34 +418,31 @@ void VideoTrackEncoder::Init(const Video
       TRACK_LOG(
           LogLevel::Error,
           ("[VideoTrackEncoder %p]: Failed to initialize the encoder!", this));
       OnError();
     }
     break;
   }
 
-  mNotInitDuration += aDuration;
-  if ((mNotInitDuration / mTrackRate > VIDEO_INIT_FAILED_DURATION) &&
+  if (((aTime - mStartTime).ToSeconds() > VIDEO_INIT_FAILED_DURATION) &&
       mInitCounter > 1) {
     TRACK_LOG(LogLevel::Warning,
               ("[VideoTrackEncoder %p]: No successful init for %ds.", this,
                VIDEO_INIT_FAILED_DURATION));
     Telemetry::Accumulate(
         Telemetry::MEDIA_RECORDER_TRACK_ENCODER_INIT_TIMEOUT_TYPE, 1);
     OnError();
     return;
   }
 }
 
 void VideoTrackEncoder::Cancel() {
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
-  TRACK_LOG(LogLevel::Info,
-            ("[VideoTrackEncoder %p]: Cancel(), currentTime=%" PRIu64, this,
-             mCurrentTime));
+  TRACK_LOG(LogLevel::Info, ("[VideoTrackEncoder %p]: Cancel()", this));
   mCanceled = true;
   mIncomingBuffer.Clear();
   mOutgoingBuffer.Clear();
   mLastChunk.SetNull(0);
 }
 
 void VideoTrackEncoder::NotifyEndOfStream() {
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
@@ -454,220 +455,219 @@ void VideoTrackEncoder::NotifyEndOfStrea
   }
 
   if (mEndOfStream) {
     // We have already been notified.
     return;
   }
 
   mEndOfStream = true;
-  TRACK_LOG(
-      LogLevel::Info,
-      ("[VideoTrackEncoder %p]: NotifyEndOfStream(), currentTime=%" PRIu64,
-       this, mCurrentTime));
+  TRACK_LOG(LogLevel::Info,
+            ("[VideoTrackEncoder %p]: NotifyEndOfStream()", this));
 
-  if (!mLastChunk.IsNull() && mLastChunk.mDuration > 0) {
+  if (!mLastChunk.IsNull()) {
     RefPtr<layers::Image> lastImage = mLastChunk.mFrame.GetImage();
-    TRACK_LOG(LogLevel::Debug,
-              ("[VideoTrackEncoder]: Appending last video frame %p, "
-               "duration=%.5f",
-               lastImage.get(),
-               FramesToTimeUnit(mLastChunk.mDuration, mTrackRate).ToSeconds()));
-    mOutgoingBuffer.AppendFrame(
-        lastImage.forget(), mLastChunk.mDuration,
-        mLastChunk.mFrame.GetIntrinsicSize(), PRINCIPAL_HANDLE_NONE,
-        mLastChunk.mFrame.GetForceBlack(), mLastChunk.mTimeStamp);
+    const TimeStamp now = TimeStamp::Now();
+    TimeStamp currentTime = mSuspended ? mSuspendTime : mCurrentTime;
+    currentTime = mDriftCompensator->GetVideoTime(now, currentTime);
+    TimeDuration absoluteEndTime = currentTime - mStartTime;
+    CheckedInt64 duration =
+        UsecsToFrames(absoluteEndTime.ToMicroseconds(), mTrackRate) -
+        mEncodedTicks;
+    if (duration.isValid() && duration.value() > 0) {
+      mEncodedTicks += duration.value();
+      TRACK_LOG(LogLevel::Debug,
+                ("[VideoTrackEncoder %p]: Appending last video frame %p at pos "
+                 "%.3fs, "
+                 "track-end=%.3fs",
+                 this, lastImage.get(),
+                 (mLastChunk.mTimeStamp - mStartTime).ToSeconds(),
+                 absoluteEndTime.ToSeconds()));
+      mOutgoingBuffer.AppendFrame(
+          lastImage.forget(), duration.value(),
+          mLastChunk.mFrame.GetIntrinsicSize(), PRINCIPAL_HANDLE_NONE,
+          mLastChunk.mFrame.GetForceBlack(), mLastChunk.mTimeStamp);
+    }
   }
 
   mIncomingBuffer.Clear();
   mLastChunk.SetNull(0);
 
   if (mInitialized && !mCanceled) {
     OnDataAvailable();
   }
 }
 
-void VideoTrackEncoder::SetStartOffset(StreamTime aStartOffset) {
+void VideoTrackEncoder::SetStartOffset(const TimeStamp& aStartOffset) {
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
-  MOZ_ASSERT(mCurrentTime == 0);
-  TRACK_LOG(LogLevel::Info,
-            ("[VideoTrackEncoder %p]: SetStartOffset(), aStartOffset=%" PRIu64,
-             this, aStartOffset));
-  mIncomingBuffer.InsertNullDataAtStart(aStartOffset);
+  MOZ_ASSERT(mCurrentTime.IsNull());
+  TRACK_LOG(LogLevel::Info, ("[VideoTrackEncoder %p]: SetStartOffset()", this));
+  mStartTime = aStartOffset;
   mCurrentTime = aStartOffset;
 }
 
-void VideoTrackEncoder::AdvanceBlockedInput(StreamTime aDuration) {
-  MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
-  TRACK_LOG(
-      LogLevel::Verbose,
-      ("[VideoTrackEncoder %p]: AdvanceBlockedInput(), aDuration=%" PRIu64,
-       this, aDuration));
-
-  // We call Init here so it can account for aDuration towards the Init timeout
-  Init(mOutgoingBuffer, aDuration);
-
-  mIncomingBuffer.InsertNullDataAtStart(aDuration);
-  mCurrentTime += aDuration;
-}
-
-void VideoTrackEncoder::AdvanceCurrentTime(StreamTime aDuration) {
+void VideoTrackEncoder::AdvanceCurrentTime(const TimeStamp& aTime) {
   AUTO_PROFILER_LABEL("VideoTrackEncoder::AdvanceCurrentTime", OTHER);
 
   MOZ_ASSERT(!mWorkerThread || mWorkerThread->IsCurrentThreadIn());
+  MOZ_ASSERT(!mStartTime.IsNull());
+  MOZ_ASSERT(!mCurrentTime.IsNull());
 
   if (mCanceled) {
     return;
   }
 
   if (mEndOfStream) {
     return;
   }
 
-  TRACK_LOG(LogLevel::Verbose,
-            ("[VideoTrackEncoder %p]: AdvanceCurrentTime() %" PRIu64, this,
-             aDuration));
-
-  StreamTime currentTime = mCurrentTime + aDuration;
+  MOZ_ASSERT(!mStartTime.IsNull());
 
   if (mSuspended) {
-    mCurrentTime = currentTime;
-    mIncomingBuffer.ForgetUpTo(mCurrentTime);
+    TRACK_LOG(
+        LogLevel::Verbose,
+        ("[VideoTrackEncoder %p]: AdvanceCurrentTime() suspended at %.3fs",
+         this, (mCurrentTime - mStartTime).ToSeconds()));
+    mCurrentTime = aTime;
+    mIncomingBuffer.ForgetUpToTime(mCurrentTime);
     return;
   }
 
+  TRACK_LOG(LogLevel::Verbose,
+            ("[VideoTrackEncoder %p]: AdvanceCurrentTime() to %.3fs", this,
+             (aTime - mStartTime).ToSeconds()));
+
+  // Grab frames within the currentTime range from the incoming buffer.
   VideoSegment tempSegment;
-  if (currentTime <= mIncomingBuffer.GetDuration()) {
-    tempSegment.AppendSlice(mIncomingBuffer, mCurrentTime, currentTime);
-  } else {
-    NS_ASSERTION(false,
-                 "VideoTrackEncoder::AdvanceCurrentTime Not enough data");
-    TRACK_LOG(
-        LogLevel::Error,
-        ("[VideoTrackEncoder %p]: AdvanceCurrentTime() Not enough data. "
-         "In incoming=%" PRIu64 ", aDuration=%" PRIu64 ", currentTime=%" PRIu64,
-         this, mIncomingBuffer.GetDuration(), aDuration, currentTime));
-    OnError();
+  {
+    VideoChunk* previousChunk = &mLastChunk;
+    auto appendDupes = [&](const TimeStamp& aUpTo) {
+      while ((aUpTo - previousChunk->mTimeStamp).ToSeconds() > 1.0) {
+        // We encode at least one frame per second, even if there are none
+        // flowing.
+        previousChunk->mTimeStamp += TimeDuration::FromSeconds(1.0);
+        tempSegment.AppendFrame(do_AddRef(previousChunk->mFrame.GetImage()), 1,
+                                previousChunk->mFrame.GetIntrinsicSize(),
+                                previousChunk->mFrame.GetPrincipalHandle(),
+                                previousChunk->mFrame.GetForceBlack(),
+                                previousChunk->mTimeStamp);
+        TRACK_LOG(
+            LogLevel::Verbose,
+            ("[VideoTrackEncoder %p]: Duplicating video frame (%p) at pos %.3f",
+             this, previousChunk->mFrame.GetImage(),
+             (previousChunk->mTimeStamp - mStartTime).ToSeconds()));
+      }
+    };
+    for (VideoSegment::ChunkIterator iter(mIncomingBuffer); !iter.IsEnded();
+         iter.Next()) {
+      MOZ_ASSERT(!iter->IsNull());
+      if (!previousChunk->IsNull() &&
+          iter->mTimeStamp <= previousChunk->mTimeStamp) {
+        // This frame starts earlier than previousChunk. Skip.
+        continue;
+      }
+      if (iter->mTimeStamp >= aTime) {
+        // This frame starts in the future. Stop.
+        break;
+      }
+      if (!previousChunk->IsNull()) {
+        appendDupes(iter->mTimeStamp);
+      }
+      tempSegment.AppendFrame(do_AddRef(iter->mFrame.GetImage()), 1,
+                              iter->mFrame.GetIntrinsicSize(),
+                              iter->mFrame.GetPrincipalHandle(),
+                              iter->mFrame.GetForceBlack(), iter->mTimeStamp);
+      TRACK_LOG(LogLevel::Verbose,
+                ("[VideoTrackEncoder %p]: Taking video frame (%p) at pos %.3f",
+                 this, iter->mFrame.GetImage(),
+                 (iter->mTimeStamp - mStartTime).ToSeconds()));
+      previousChunk = &*iter;
+    }
+    if (!previousChunk->IsNull()) {
+      appendDupes(aTime);
+    }
   }
+  mCurrentTime = aTime;
+  mIncomingBuffer.ForgetUpToTime(mCurrentTime);
 
-  mCurrentTime = currentTime;
-  mIncomingBuffer.ForgetUpTo(mCurrentTime);
-
+  // Convert tempSegment timestamps to durations and add chunks with known
+  // duration to mOutgoingBuffer.
+  const TimeStamp now = TimeStamp::Now();
   bool chunkAppended = false;
-
-  // Convert tempSegment timestamps to durations and add it to mOutgoingBuffer.
-  VideoSegment::ConstChunkIterator iter(tempSegment);
-  for (; !iter.IsEnded(); iter.Next()) {
+  for (VideoSegment::ConstChunkIterator iter(tempSegment); !iter.IsEnded();
+       iter.Next()) {
     VideoChunk chunk = *iter;
 
     if (mLastChunk.mTimeStamp.IsNull()) {
-      if (chunk.IsNull()) {
-        // The start of this track is frameless. We need to track the time
-        // it takes to get the first frame.
-        mLastChunk.mDuration += chunk.mDuration;
-        continue;
-      }
-
-      // This is the first real chunk in the track. Use its timestamp as the
-      // starting point for this track.
-      MOZ_ASSERT(!chunk.mTimeStamp.IsNull());
-      const StreamTime nullDuration = mLastChunk.mDuration;
-      mLastChunk = chunk;
-      chunk.mDuration = 0;
+      // This is the first real chunk in the track. Make it start at the
+      // beginning of the track.
+      MOZ_ASSERT(!iter->mTimeStamp.IsNull());
 
-      TRACK_LOG(LogLevel::Verbose,
-                ("[VideoTrackEncoder]: Got first video chunk after %" PRId64
-                 " ticks.",
-                 nullDuration));
-      // Adapt to the time before the first frame. This extends the first frame
-      // from [start, end] to [0, end], but it'll do for now.
-      auto diff = FramesToTimeUnit(nullDuration, mTrackRate);
-      if (!diff.IsValid()) {
-        NS_ERROR("null duration overflow");
-        return;
-      }
+      TRACK_LOG(
+          LogLevel::Verbose,
+          ("[VideoTrackEncoder %p]: Got the first video frame (%p) at pos %.3f "
+           "(moving it to beginning)",
+           this, iter->mFrame.GetImage(),
+           (iter->mTimeStamp - mStartTime).ToSeconds()));
 
-      mLastChunk.mTimeStamp -= diff.ToTimeDuration();
-      mLastChunk.mDuration += nullDuration;
+      mLastChunk = *iter;
+      mLastChunk.mTimeStamp = mStartTime;
+      continue;
     }
 
     MOZ_ASSERT(!mLastChunk.IsNull());
-    if (mLastChunk.CanCombineWithFollowing(chunk) || chunk.IsNull()) {
-      TRACK_LOG(LogLevel::Verbose,
-                ("[VideoTrackEncoder]: Got dupe or null chunk."));
-      // This is the same frame as before (or null). We extend the last chunk
-      // with its duration.
-      mLastChunk.mDuration += chunk.mDuration;
-
-      if (mLastChunk.mDuration < mTrackRate) {
-        TRACK_LOG(LogLevel::Verbose, ("[VideoTrackEncoder]: Ignoring dupe/null "
-                                      "chunk of duration %" PRId64,
-                                      chunk.mDuration));
-        continue;
-      }
-
-      TRACK_LOG(LogLevel::Verbose,
-                ("[VideoTrackEncoder]: Chunk >1 second. duration=%" PRId64 ", "
-                 "trackRate=%" PRId32,
-                 mLastChunk.mDuration, mTrackRate));
+    MOZ_ASSERT(!chunk.IsNull());
 
-      // If we have gotten dupes for over a second, we force send one
-      // to the encoder to make sure there is some output.
-      chunk.mTimeStamp = mLastChunk.mTimeStamp + TimeDuration::FromSeconds(1);
-      chunk.mDuration = mLastChunk.mDuration - mTrackRate;
-      mLastChunk.mDuration = mTrackRate;
-
-      if (chunk.IsNull()) {
-        // Ensure that we don't pass null to the encoder by making mLastChunk
-        // null later on.
-        chunk.mFrame = mLastChunk.mFrame;
-      }
-    }
-
-    if (mStartTime.IsNull()) {
-      mStartTime = mLastChunk.mTimeStamp;
-    }
-
-    TimeDuration relativeTime = chunk.mTimeStamp - mStartTime;
-    RefPtr<layers::Image> lastImage = mLastChunk.mFrame.GetImage();
+    TimeDuration absoluteEndTime =
+        mDriftCompensator->GetVideoTime(now, chunk.mTimeStamp) - mStartTime;
     TRACK_LOG(LogLevel::Verbose,
-              ("[VideoTrackEncoder]: Appending video frame %p, at pos %.5fs",
-               lastImage.get(), relativeTime.ToSeconds()));
+              ("[VideoTrackEncoder %p]: Appending video frame %p, at pos %.3fs "
+               "until %.3fs",
+               this, mLastChunk.mFrame.GetImage(),
+               (mDriftCompensator->GetVideoTime(now, mLastChunk.mTimeStamp) -
+                mStartTime)
+                   .ToSeconds(),
+               absoluteEndTime.ToSeconds()));
     CheckedInt64 duration =
-        UsecsToFrames(relativeTime.ToMicroseconds(), mTrackRate) -
+        UsecsToFrames(absoluteEndTime.ToMicroseconds(), mTrackRate) -
         mEncodedTicks;
     if (!duration.isValid()) {
       NS_ERROR("Duration overflow");
       return;
     }
 
     if (duration.value() <= 0) {
-      // The timestamp for mLastChunk is newer than for chunk.
-      // This means the durations reported from MediaStreamGraph for
-      // mLastChunk were larger than the timestamp diff - and durations were
-      // used to trigger the 1-second frame above. This could happen due to
-      // drift or underruns in the graph.
-      TRACK_LOG(LogLevel::Warning,
-                ("[VideoTrackEncoder]: Underrun detected. Diff=%" PRId64,
-                 duration.value()));
-      chunk.mTimeStamp = mLastChunk.mTimeStamp;
-    } else {
-      mEncodedTicks += duration.value();
-      mOutgoingBuffer.AppendFrame(
-          lastImage.forget(), duration.value(),
-          mLastChunk.mFrame.GetIntrinsicSize(), PRINCIPAL_HANDLE_NONE,
-          mLastChunk.mFrame.GetForceBlack(), mLastChunk.mTimeStamp);
-      chunkAppended = true;
+      // A frame either started before the last frame (can happen when
+      // multiple frames are added before SetStartOffset), or
+      // two frames were so close together that they ended up at the same
+      // position. We handle both cases by ignoring the previous frame.
+
+      TRACK_LOG(LogLevel::Verbose,
+                ("[VideoTrackEncoder %p]: Duration from frame %p to frame %p "
+                 "is %" PRId64 ". Ignoring %p",
+                 this, mLastChunk.mFrame.GetImage(), iter->mFrame.GetImage(),
+                 duration.value(), mLastChunk.mFrame.GetImage()));
+
+      TimeStamp t = mLastChunk.mTimeStamp;
+      mLastChunk = *iter;
+      mLastChunk.mTimeStamp = t;
+      continue;
     }
 
+    mEncodedTicks += duration.value();
+    mOutgoingBuffer.AppendFrame(
+        do_AddRef(mLastChunk.mFrame.GetImage()), duration.value(),
+        mLastChunk.mFrame.GetIntrinsicSize(), PRINCIPAL_HANDLE_NONE,
+        mLastChunk.mFrame.GetForceBlack(), mLastChunk.mTimeStamp);
+    chunkAppended = true;
     mLastChunk = chunk;
   }
 
   if (chunkAppended) {
-    Init(mOutgoingBuffer, aDuration);
+    Init(mOutgoingBuffer, mCurrentTime);
     if (mInitialized) {
       OnDataAvailable();
     }
   }
 }
 
 size_t VideoTrackEncoder::SizeOfExcludingThis(
     mozilla::MallocSizeOf aMallocSizeOf) {
--- a/dom/media/encoder/TrackEncoder.h
+++ b/dom/media/encoder/TrackEncoder.h
@@ -162,18 +162,20 @@ class TrackEncoder {
 
   /**
    * True once this encoding has been cancelled.
    */
   bool mCanceled;
 
   // How many times we have tried to initialize the encoder.
   uint32_t mInitCounter;
-  StreamTime mNotInitDuration;
 
+  /**
+   * True if this TrackEncoder is currently suspended.
+   */
   bool mSuspended;
 
   /**
    * The track rate of source media.
    */
   TrackRate mTrackRate;
 
   /**
@@ -185,16 +187,17 @@ class TrackEncoder {
 };
 
 class AudioTrackEncoder : public TrackEncoder {
  public:
   explicit AudioTrackEncoder(TrackRate aTrackRate)
       : TrackEncoder(aTrackRate),
         mChannels(0),
         mSamplingRate(0),
+        mNotInitDuration(0),
         mAudioBitrate(0) {}
 
   /**
    * Suspends encoding from now, i.e., all future audio data received through
    * AppendAudioSegment() until the next Resume() will be dropped.
    */
   void Suspend();
 
@@ -312,18 +315,19 @@ class AudioTrackEncoder : public TrackEn
 
   /**
    * A segment queue of outgoing audio track data to the encoder.
    * The contents of mOutgoingBuffer will always be what has been appended on
    * the encoder thread but not yet consumed by the encoder sub class.
    */
   AudioSegment mOutgoingBuffer;
 
+  StreamTime mNotInitDuration;
+
   uint32_t mAudioBitrate;
-
 };
 
 enum class FrameDroppingMode {
   ALLOW,     // Allowed to drop frames to keep up under load
   DISALLOW,  // Must not drop any frames, even if it means we will OOM
 };
 
 class VideoTrackEncoder : public TrackEncoder {
@@ -331,22 +335,22 @@ class VideoTrackEncoder : public TrackEn
   explicit VideoTrackEncoder(RefPtr<DriftCompensator> aDriftCompensator,
                              TrackRate aTrackRate,
                              FrameDroppingMode aFrameDroppingMode);
 
   /**
    * Suspends encoding from aTime, i.e., all video frame with a timestamp
    * between aTime and the timestamp of the next Resume() will be dropped.
    */
-  void Suspend(TimeStamp aTime);
+  void Suspend(const TimeStamp& aTime);
 
   /**
    * Resumes encoding starting at aTime.
    */
-  void Resume(TimeStamp aTime);
+  void Resume(const TimeStamp& aTime);
 
   /**
    * Appends source video frames to mIncomingBuffer. We only append the source
    * chunk if the image is different from mLastChunk's image. Called on the
    * MediaStreamGraph thread.
    */
   void AppendVideoSegment(VideoSegment&& aSegment);
 
@@ -369,50 +373,43 @@ class VideoTrackEncoder : public TrackEn
    * Tries to initiate the VideoEncoder based on data in aSegment.
    * This can be re-called often, as it will exit early should we already be
    * initiated. mInitiated will only be set if there was enough data in
    * aSegment to infer metadata. If mInitiated gets set, listeners are notified.
    *
    * Failing to initiate the encoder for an accumulated aDuration of 30 seconds
    * is seen as an error and will cancel the current encoding.
    */
-  void Init(const VideoSegment& aSegment, StreamTime aDuration);
+  void Init(const VideoSegment& aSegment, const TimeStamp& aTime);
 
   StreamTime SecondsToMediaTime(double aS) const {
     NS_ASSERTION(0 <= aS && aS <= TRACK_TICKS_MAX / TRACK_RATE_MAX,
                  "Bad seconds");
     return mTrackRate * aS;
   }
 
-  void SetStartOffset(StreamTime aStartOffset);
+  /**
+   * MediaStreamGraph notifies us about the time of the track's start.
+   * This gets called on the MediaEncoder thread after a dispatch.
+   */
+  void SetStartOffset(const TimeStamp& aStartOffset);
 
   void Cancel() override;
 
   /**
    * Notifies us that we have reached the end of the stream and no more data
    * will be appended to mIncomingBuffer.
    */
   void NotifyEndOfStream() override;
 
   /**
-   * Dispatched from MediaStreamGraph when it has run an iteration where the
-   * input track of the track this TrackEncoder is associated with didn't have
-   * any data.
-   *
-   * Since we use a direct listener for VideoSegments we miss periods of time
-   * for which the source didn't have any data. This ensures that the latest
-   * frame gets displayed while we wait for more data to be pushed.
-   */
-  void AdvanceBlockedInput(StreamTime aDuration);
-
-  /**
    * Dispatched from MediaStreamGraph when it has run an iteration so we can
    * hand more data to the encoder.
    */
-  void AdvanceCurrentTime(StreamTime aDuration);
+  void AdvanceCurrentTime(const TimeStamp& aTime);
 
   /**
    * Set desired keyframe interval defined in milliseconds.
    */
   void SetKeyFrameInterval(int32_t aKeyFrameInterval);
 
  protected:
   /**
@@ -454,18 +451,18 @@ class VideoTrackEncoder : public TrackEn
    * The last unique frame and duration so far handled by
    * NotifyAdvanceCurrentTime. When a new frame is detected, mLastChunk is added
    * to mOutgoingBuffer.
    */
   VideoChunk mLastChunk;
 
   /**
    * A segment queue of incoming video track data, from listeners.
-   * The duration of mIncomingBuffer is strictly increasing as it gets fed more
-   * data. Consumed data is replaced by null data.
+   * The duration of mIncomingBuffer is irrelevant as we only look at TimeStamps
+   * of frames. Consumed data is replaced by null data.
    */
   VideoSegment mIncomingBuffer;
 
   /**
    * A segment queue of outgoing video track data to the encoder.
    * The contents of mOutgoingBuffer will always be what has been consumed from
    * mIncomingBuffer (up to mCurrentTime) but not yet consumed by the encoder
    * sub class. There won't be any null data at the beginning of mOutgoingBuffer
@@ -474,22 +471,24 @@ class VideoTrackEncoder : public TrackEn
   VideoSegment mOutgoingBuffer;
 
   /**
    * The number of mTrackRate ticks we have passed to mOutgoingBuffer.
    */
   StreamTime mEncodedTicks;
 
   /**
-   * The latest current time reported to us from the MSG.
+   * The time up to which we have forwarded data from mIncomingBuffer to
+   * mOutgoingBuffer.
    */
-  StreamTime mCurrentTime;
+  TimeStamp mCurrentTime;
 
   /**
-   * The time of the first real video frame passed to mOutgoingBuffer (at t=0).
+   * The time the video track started, so the start of the video track can be
+   * synced to the start of the audio track.
    *
    * Note that this time will progress during suspension, to make sure the
    * incoming frames stay in sync with the output.
    */
   TimeStamp mStartTime;
 
   /**
    * The time Suspend was called on the MediaRecorder, so we can calculate the
--- a/dom/media/gtest/TestVideoTrackEncoder.cpp
+++ b/dom/media/gtest/TestVideoTrackEncoder.cpp
@@ -276,45 +276,45 @@ TEST(VP8VideoTrackEncoder, FrameEncode) 
   images.AppendElement(generator.GenerateNV21Image());
 
   // Put generated YUV frame into video segment.
   // Duration of each frame is 1 second.
   VideoSegment segment;
   TimeStamp now = TimeStamp::Now();
   for (nsTArray<RefPtr<Image>>::size_type i = 0; i < images.Length(); i++) {
     RefPtr<Image> image = images[i];
-    segment.AppendFrame(image.forget(), mozilla::StreamTime(VIDEO_TRACK_RATE),
-                        generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+    segment.AppendFrame(image.forget(), 1, generator.GetSize(),
+                        PRINCIPAL_HANDLE_NONE, false,
                         now + TimeDuration::FromSeconds(i));
   }
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(images.Length() * VIDEO_TRACK_RATE);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(images.Length()));
 
   // Pull Encoded Data back from encoder.
   EncodedFrameContainer container;
   EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 }
 
 // Test that encoding a single frame gives useful output.
 TEST(VP8VideoTrackEncoder, SingleFrameEncode) {
   TestVP8TrackEncoder encoder;
 
   // Pass a half-second frame to the encoder.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 2),  // 1/2 second
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE);
+  TimeStamp now = TimeStamp::Now();
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 2);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Read out encoded data, and verify.
@@ -335,103 +335,59 @@ TEST(VP8VideoTrackEncoder, SameFrameEnco
 
   // Pass 15 100ms frames to the encoder.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   RefPtr<Image> image = generator.GenerateI420Image();
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
   for (uint32_t i = 0; i < 15; ++i) {
-    segment.AppendFrame(do_AddRef(image),
-                        mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 100ms
-                        generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+    segment.AppendFrame(do_AddRef(image), 1, generator.GetSize(),
+                        PRINCIPAL_HANDLE_NONE, false,
                         now + TimeDuration::FromSeconds(i * 0.1));
   }
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime((VIDEO_TRACK_RATE / 10) * 15);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify total duration being 1.5s.
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
     totalDuration += frame->GetDuration();
   }
   const uint64_t oneAndAHalf = (PR_USEC_PER_SEC / 2) * 3;
   EXPECT_EQ(oneAndAHalf, totalDuration);
 }
 
-// Test encoding a track that starts with null data
-TEST(VP8VideoTrackEncoder, NullFrameFirst) {
-  TestVP8TrackEncoder encoder;
-  YUVBufferGenerator generator;
-  generator.Init(mozilla::gfx::IntSize(640, 480));
-  RefPtr<Image> image = generator.GenerateI420Image();
-  TimeStamp now = TimeStamp::Now();
-  VideoSegment segment;
-
-  // Pass 2 100ms null frames to the encoder.
-  for (uint32_t i = 0; i < 2; ++i) {
-    segment.AppendFrame(nullptr,
-                        mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 100ms
-                        generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-                        now + TimeDuration::FromSeconds(i * 0.1));
-  }
-
-  // Pass a real 100ms frame to the encoder.
-  segment.AppendFrame(image.forget(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 100ms
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-                      now + TimeDuration::FromSeconds(0.3));
-
-  encoder.SetStartOffset(0);
-  encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(3 * VIDEO_TRACK_RATE / 10);
-  encoder.NotifyEndOfStream();
-
-  EncodedFrameContainer container;
-  ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
-
-  EXPECT_TRUE(encoder.IsEncodingComplete());
-
-  // Verify total duration being 0.3s.
-  uint64_t totalDuration = 0;
-  for (auto& frame : container.GetEncodedFrames()) {
-    totalDuration += frame->GetDuration();
-  }
-  const uint64_t pointThree = (PR_USEC_PER_SEC / 10) * 3;
-  EXPECT_EQ(pointThree, totalDuration);
-}
-
 // Test encoding a track that has to skip frames.
 TEST(VP8VideoTrackEncoder, SkippedFrames) {
   TestVP8TrackEncoder encoder;
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
 
   // Pass 100 frames of the shortest possible duration where we don't get
   // rounding errors between input/output rate.
   for (uint32_t i = 0; i < 100; ++i) {
-    segment.AppendFrame(generator.GenerateI420Image(),
-                        mozilla::StreamTime(90),  // 1ms
-                        generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+    segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                        PRINCIPAL_HANDLE_NONE, false,
                         now + TimeDuration::FromMilliseconds(i));
   }
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(100 * 90);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(100));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify total duration being 100 * 1ms = 100ms.
@@ -450,31 +406,29 @@ TEST(VP8VideoTrackEncoder, RoundingError
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
 
   // Pass nine frames with timestamps not expressable in 90kHz sample rate,
   // then one frame to make the total duration one second.
   uint32_t usPerFrame = 99999;  // 99.999ms
   for (uint32_t i = 0; i < 9; ++i) {
-    segment.AppendFrame(generator.GenerateI420Image(),
-                        mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 100ms
-                        generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+    segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                        PRINCIPAL_HANDLE_NONE, false,
                         now + TimeDuration::FromMicroseconds(i * usPerFrame));
   }
 
   // This last frame has timestamp start + 0.9s and duration 0.1s.
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 100ms
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.9));
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(10 * VIDEO_TRACK_RATE / 10);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   // Verify total duration being 1s.
@@ -485,94 +439,135 @@ TEST(VP8VideoTrackEncoder, RoundingError
   const uint64_t oneSecond = PR_USEC_PER_SEC;
   EXPECT_EQ(oneSecond, totalDuration);
 }
 
 // Test that we're encoding timestamps rather than durations.
 TEST(VP8VideoTrackEncoder, TimestampFrameEncode) {
   TestVP8TrackEncoder encoder;
 
-  // Pass 3 frames with duration 0.1s, but varying timestamps to the encoder.
-  // Total duration of the segment should be the same for both.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 0.1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 0.1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.05));
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 0.1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.2));
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(3 * VIDEO_TRACK_RATE / 10);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
-  // Verify total duration being 4s and individual frames being [0.5s, 1.5s, 1s,
-  // 1s]
+  // Verify total duration being 0.3s and individual frames being [0.05s, 0.15s,
+  // 0.1s]
   uint64_t expectedDurations[] = {(PR_USEC_PER_SEC / 10) / 2,
                                   (PR_USEC_PER_SEC / 10) * 3 / 2,
                                   (PR_USEC_PER_SEC / 10)};
   uint64_t totalDuration = 0;
   size_t i = 0;
   for (auto& frame : container.GetEncodedFrames()) {
     EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
     totalDuration += frame->GetDuration();
   }
   const uint64_t pointThree = (PR_USEC_PER_SEC / 10) * 3;
   EXPECT_EQ(pointThree, totalDuration);
 }
 
+// Test that we're compensating for drift when encoding.
+TEST(VP8VideoTrackEncoder, DriftingFrameEncode) {
+  TestVP8TrackEncoder encoder;
+
+  YUVBufferGenerator generator;
+  generator.Init(mozilla::gfx::IntSize(640, 480));
+  TimeStamp now = TimeStamp::Now();
+
+  // Set up major drift -- audio that goes twice as fast as video.
+  // This should make the given video durations double as they get encoded.
+  EXPECT_CALL(*encoder.DriftCompensator(), GetVideoTime(_, _))
+      .WillRepeatedly(Invoke(
+          [&](TimeStamp, TimeStamp aTime) { return now + (aTime - now) * 2; }));
+
+  VideoSegment segment;
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromSeconds(0.05));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromSeconds(0.2));
+
+  encoder.SetStartOffset(now);
+  encoder.AppendVideoSegment(std::move(segment));
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
+  encoder.NotifyEndOfStream();
+
+  EncodedFrameContainer container;
+  ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
+
+  EXPECT_TRUE(encoder.IsEncodingComplete());
+
+  // Verify total duration being 0.6s and individual frames being [0.1s, 0.3s,
+  // 0.2s]
+  uint64_t expectedDurations[] = {(PR_USEC_PER_SEC / 10),
+                                  (PR_USEC_PER_SEC / 10) * 3,
+                                  (PR_USEC_PER_SEC / 10) * 2};
+  uint64_t totalDuration = 0;
+  size_t i = 0;
+  for (auto& frame : container.GetEncodedFrames()) {
+    EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
+    totalDuration += frame->GetDuration();
+  }
+  const uint64_t pointSix = (PR_USEC_PER_SEC / 10) * 6;
+  EXPECT_EQ(pointSix, totalDuration);
+}
+
 // Test that suspending an encoding works.
 TEST(VP8VideoTrackEncoder, Suspended) {
   TestVP8TrackEncoder encoder;
 
   // Pass 3 frames with duration 0.1s. We suspend before and resume after the
   // second frame.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 0.1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 10);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.1));
 
   encoder.Suspend(now + TimeDuration::FromSeconds(0.1));
 
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 0.1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.1));
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 10);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.2));
 
   encoder.Resume(now + TimeDuration::FromSeconds(0.2));
 
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 0.1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.2));
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 10);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
 
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
@@ -592,32 +587,30 @@ TEST(VP8VideoTrackEncoder, Suspended) {
 TEST(VP8VideoTrackEncoder, SuspendedUntilEnd) {
   TestVP8TrackEncoder encoder;
 
   // Pass 2 frames with duration 0.1s. We suspend before the second frame.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 0.1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 10);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.1));
 
   encoder.Suspend(now + TimeDuration::FromSeconds(0.1));
 
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 10),  // 0.1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.1));
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 10);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.2));
 
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
@@ -641,63 +634,60 @@ TEST(VP8VideoTrackEncoder, AlwaysSuspend
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
 
   TimeStamp now = TimeStamp::Now();
 
   encoder.Suspend(now);
 
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(2 * VIDEO_TRACK_RATE),  // 2s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(2 * VIDEO_TRACK_RATE);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2));
 
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
-  // Verify that we have one encoded frames and a total duration of 0.1s.
+  // Verify that we have no encoded frames.
   const uint64_t none = 0;
   EXPECT_EQ(none, container.GetEncodedFrames().Length());
 }
 
 // Test that encoding a track that is suspended in the beginning works.
 TEST(VP8VideoTrackEncoder, SuspendedBeginning) {
   TestVP8TrackEncoder encoder;
   TimeStamp now = TimeStamp::Now();
 
   // Suspend and pass a frame with duration 0.5s. Then resume and pass one more.
   encoder.Suspend(now);
 
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 2),  // 0.5s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 2);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
 
   encoder.Resume(now + TimeDuration::FromSeconds(0.5));
 
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE / 2),  // 0.5s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.5));
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 2);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
 
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
@@ -718,35 +708,33 @@ TEST(VP8VideoTrackEncoder, SuspendedBegi
 TEST(VP8VideoTrackEncoder, SuspendedOverlap) {
   TestVP8TrackEncoder encoder;
 
   // Pass a 1s frame and suspend after 0.5s.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE),  // 1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
 
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 2);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
   encoder.Suspend(now + TimeDuration::FromSeconds(0.5));
 
   // Pass another 1s frame and resume after 0.3 of this new frame.
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE),  // 1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(1));
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime((VIDEO_TRACK_RATE / 10) * 8);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.3));
   encoder.Resume(now + TimeDuration::FromSeconds(1.3));
-  encoder.AdvanceCurrentTime((VIDEO_TRACK_RATE / 10) * 7);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2));
 
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
@@ -766,23 +754,22 @@ TEST(VP8VideoTrackEncoder, SuspendedOver
 TEST(VP8VideoTrackEncoder, PrematureEnding) {
   TestVP8TrackEncoder encoder;
 
   // Pass a 1s frame and end the track after 0.5s.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE),  // 1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 2);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   uint64_t totalDuration = 0;
@@ -798,23 +785,22 @@ TEST(VP8VideoTrackEncoder, DelayedStart)
   TestVP8TrackEncoder encoder;
 
   // Pass a 2s frame, start (pass first CurrentTime) at 0.5s, end at 1s.
   // Should result in a 0.5s encoding.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(2 * VIDEO_TRACK_RATE),  // 2s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(VIDEO_TRACK_RATE / 2);
+  encoder.SetStartOffset(now + TimeDuration::FromSeconds(0.5));
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 2);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   uint64_t totalDuration = 0;
@@ -831,23 +817,22 @@ TEST(VP8VideoTrackEncoder, DelayedStartO
   TestVP8TrackEncoder encoder;
 
   // Pass a 2s frame, start (pass first CurrentTime) at 0.5s, end at 1s.
   // Should result in a 0.5s encoding.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(2 * VIDEO_TRACK_RATE),  // 2s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.SetStartOffset(VIDEO_TRACK_RATE / 2);
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 2);
+  encoder.SetStartOffset(now + TimeDuration::FromSeconds(0.5));
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   uint64_t totalDuration = 0;
@@ -863,84 +848,124 @@ TEST(VP8VideoTrackEncoder, VeryDelayedSt
   TestVP8TrackEncoder encoder;
 
   // Pass a 1s frame, start (pass first CurrentTime) at 10s, end at 10.5s.
   // Should result in a 0.5s encoding.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(),
-                      mozilla::StreamTime(VIDEO_TRACK_RATE),  // 1s
-                      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
 
-  encoder.SetStartOffset(VIDEO_TRACK_RATE * 10);
+  encoder.SetStartOffset(now + TimeDuration::FromSeconds(10));
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 2);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(10.5));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   uint64_t totalDuration = 0;
   for (auto& frame : container.GetEncodedFrames()) {
     totalDuration += frame->GetDuration();
   }
   const uint64_t half = PR_USEC_PER_SEC / 2;
   EXPECT_EQ(half, totalDuration);
 }
 
+// Test that a video frame that hangs around for a long time gets encoded every
+// second.
+TEST(VP8VideoTrackEncoder, LongFramesReEncoded) {
+  TestVP8TrackEncoder encoder;
+
+  // Pass a frame at t=0 and start encoding.
+  // Advancing the current time by 1.5s should encode a 1s frame.
+  // Advancing the current time by another 9.5s should encode another 10 1s
+  // frames.
+  YUVBufferGenerator generator;
+  generator.Init(mozilla::gfx::IntSize(640, 480));
+  TimeStamp now = TimeStamp::Now();
+  VideoSegment segment;
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
+
+  encoder.SetStartOffset(now);
+  encoder.AppendVideoSegment(std::move(segment));
+
+  {
+    encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
+
+    EncodedFrameContainer container;
+    ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
+    EXPECT_FALSE(encoder.IsEncodingComplete());
+
+    uint64_t totalDuration = 0;
+    for (auto& frame : container.GetEncodedFrames()) {
+      totalDuration += frame->GetDuration();
+    }
+    const uint64_t oneSec = PR_USEC_PER_SEC;
+    EXPECT_EQ(oneSec, totalDuration);
+    EXPECT_EQ(1U, container.GetEncodedFrames().Length());
+  }
+
+  {
+    encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(11));
+    encoder.NotifyEndOfStream();
+
+    EncodedFrameContainer container;
+    ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
+    EXPECT_TRUE(encoder.IsEncodingComplete());
+
+    uint64_t totalDuration = 0;
+    for (auto& frame : container.GetEncodedFrames()) {
+      totalDuration += frame->GetDuration();
+    }
+    const uint64_t tenSec = PR_USEC_PER_SEC * 10;
+    EXPECT_EQ(tenSec, totalDuration);
+    EXPECT_EQ(10U, container.GetEncodedFrames().Length());
+  }
+}
+
 // Test that an encoding with a defined key frame interval encodes keyframes
 // as expected. Short here means shorter than the default (1s).
 TEST(VP8VideoTrackEncoder, ShortKeyFrameInterval) {
   TestVP8TrackEncoder encoder;
 
   // Give the encoder a keyframe interval of 500ms.
   // Pass frames at 0, 400ms, 600ms, 750ms, 900ms, 1100ms
   // Expected keys: ^         ^^^^^                ^^^^^^
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 400),  // 400ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 200),  // 200ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(400));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 150),  // 150ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(600));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 150),  // 150ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(750));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 200),  // 200ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(900));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(1100));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(400));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(600));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(750));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(900));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(1100));
 
   encoder.SetKeyFrameInterval(500);
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 10 * 12);  // 1200ms
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.2));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
@@ -978,50 +1003,38 @@ TEST(VP8VideoTrackEncoder, LongKeyFrameI
 
   // Give the encoder a keyframe interval of 2000ms.
   // Pass frames at 0, 600ms, 900ms, 1100ms, 1900ms, 2100ms
   // Expected keys: ^                ^^^^^^          ^^^^^^
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 600),  // 600ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 300),  // 300ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(600));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 200),  // 200ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(900));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 800),  // 800ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(1100));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 200),  // 200ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(1900));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(2100));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(600));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(900));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(1100));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(1900));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(2100));
 
   encoder.SetKeyFrameInterval(2000);
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 10 * 22);  // 2200ms
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
@@ -1058,49 +1071,37 @@ TEST(VP8VideoTrackEncoder, DefaultKeyFra
   TestVP8TrackEncoder encoder;
 
   // Pass frames at 0, 600ms, 900ms, 1100ms, 1900ms, 2100ms
   // Expected keys: ^                ^^^^^^          ^^^^^^
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 600),  // 600ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 300),  // 300ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(600));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 200),  // 200ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(900));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 800),  // 800ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(1100));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 200),  // 200ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(1900));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(2100));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(600));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(900));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(1100));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(1900));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(2100));
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 10 * 22);  // 2200ms
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
@@ -1147,116 +1148,88 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFra
   // Then decrease keyframe interval to 200ms.
   // Pass frames at 2500ms, 2600ms, 2800ms, 2900ms
   // Expected keys:         ^^^^^^  ^^^^^^
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   EncodedFrameContainer container;
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 20),  // 20ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(100));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 10),  // 10ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(120));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 70),  // 70ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(130));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(200));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 200),  // 200ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(300));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false, now);
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(100));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(120));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(130));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(200));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(300));
 
   // The underlying encoder only gets passed frame N when frame N+1 is known,
   // so we pass in the next frame *before* the keyframe interval change.
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 800),  // 800ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(500));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(500));
 
-  encoder.SetStartOffset(0);
+  encoder.SetStartOffset(now);
   encoder.SetKeyFrameInterval(100);
   encoder.AppendVideoSegment(std::move(segment));
 
   // Advancing 501ms, so the first bit of the frame starting at 500ms is
-  // included. Note the need to compensate this at the end.
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 1000 * 501);
+  // included.
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(501));
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(1300));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 1000),  // 1000ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(1400));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(2400));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(1300));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(1400));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(2400));
 
   // The underlying encoder only gets passed frame N when frame N+1 is known,
   // so we pass in the next frame *before* the keyframe interval change.
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(2500));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(2500));
 
   encoder.SetKeyFrameInterval(1100);
   encoder.AppendVideoSegment(std::move(segment));
 
   // Advancing 2000ms from 501ms to 2501ms
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 1000 * 2000);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(2501));
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 200),  // 200ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(2600));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(2800));
-  segment.AppendFrame(
-      generator.GenerateI420Image(),
-      mozilla::StreamTime(VIDEO_TRACK_RATE / 1000 * 100),  // 100ms
-      generator.GetSize(), PRINCIPAL_HANDLE_NONE, false,
-      now + TimeDuration::FromMilliseconds(2900));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(2600));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(2800));
+  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+                      PRINCIPAL_HANDLE_NONE, false,
+                      now + TimeDuration::FromMilliseconds(2900));
 
   encoder.SetKeyFrameInterval(200);
   encoder.AppendVideoSegment(std::move(segment));
 
   // Advancing 499ms (compensating back 1ms from the first advancement)
   // from 2501ms to 3000ms.
-  encoder.AdvanceCurrentTime(VIDEO_TRACK_RATE / 1000 * 499);
+  encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(3000));
 
   encoder.NotifyEndOfStream();
 
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
   EXPECT_TRUE(encoder.IsEncodingComplete());
 
   const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();