Bug 1423253 - Remove durations from VideoSegment::AppendFrame. r=padenot
authorAndreas Pehrson <apehrson@mozilla.com>
Fri, 22 Mar 2019 11:43:40 +0000
changeset 465645 387b32b7e55d547e6b495b7b086116a5313e748e
parent 465644 d8af2cceb3c8f4c3490f852b3187687224bba393
child 465646 ba0778323644e3d0efba103f80cd909a3c2c84e3
push id35744
push userapavel@mozilla.com
push dateFri, 22 Mar 2019 16:44:08 +0000
treeherdermozilla-central@e66a2b59914d [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewerspadenot
bugs1423253
milestone68.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1423253 - Remove durations from VideoSegment::AppendFrame. r=padenot VideoSegments still have durations, and they are still needed by the MediaStreamGraph as it shuffles MediaSegments around. They do not have a say in the wall-clock duration of video frames, however. Removing this should prevent any producers from starting to add video chunks with durations in the future. Differential Revision: https://phabricator.services.mozilla.com/D22914
dom/media/CanvasCaptureMediaStream.cpp
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamListener.cpp
dom/media/VideoSegment.cpp
dom/media/VideoSegment.h
dom/media/VideoStreamTrack.cpp
dom/media/encoder/MediaEncoder.cpp
dom/media/encoder/TrackEncoder.cpp
dom/media/gtest/TestVideoSegment.cpp
dom/media/gtest/TestVideoTrackEncoder.cpp
dom/media/mediasink/DecodedStream.cpp
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineTabVideoSource.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
--- a/dom/media/CanvasCaptureMediaStream.cpp
+++ b/dom/media/CanvasCaptureMediaStream.cpp
@@ -49,17 +49,17 @@ void OutputStreamDriver::EndTrack() {
 
 void OutputStreamDriver::SetImage(const RefPtr<layers::Image>& aImage,
                                   const TimeStamp& aTime) {
   MOZ_ASSERT(NS_IsMainThread());
 
   TRACE_COMMENT("SourceMediaStream %p track %i", mSourceStream.get(), mTrackId);
 
   VideoSegment segment;
-  segment.AppendFrame(do_AddRef(aImage), 1, aImage->GetSize(), mPrincipalHandle,
+  segment.AppendFrame(do_AddRef(aImage), aImage->GetSize(), mPrincipalHandle,
                       false, aTime);
   mSourceStream->AppendToTrack(mTrackId, &segment);
 }
 
 // ----------------------------------------------------------------------
 
 class TimerDriver : public OutputStreamDriver {
  public:
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -2792,33 +2792,33 @@ void SourceMediaStream::AddDirectTrackLi
     VideoSegment& trackSegment =
         static_cast<VideoSegment&>(*track->GetSegment());
     for (VideoSegment::ConstChunkIterator iter(trackSegment); !iter.IsEnded();
          iter.Next()) {
       if (iter->IsNull()) {
         continue;
       }
       MOZ_ASSERT(!iter->mTimeStamp.IsNull());
-      bufferedVideo.AppendFrame(do_AddRef(iter->mFrame.GetImage()), 1,
+      bufferedVideo.AppendFrame(do_AddRef(iter->mFrame.GetImage()),
                                 iter->mFrame.GetIntrinsicSize(),
                                 iter->mFrame.GetPrincipalHandle(),
                                 iter->mFrame.GetForceBlack(), iter->mTimeStamp);
     }
 
     if (TrackData* updateData = FindDataForTrack(aTrackID)) {
       VideoSegment& video = static_cast<VideoSegment&>(*updateData->mData);
       for (VideoSegment::ConstChunkIterator iter(video); !iter.IsEnded();
            iter.Next()) {
         if (iter->IsNull()) {
           continue;
         }
         bufferedVideo.AppendFrame(
-            do_AddRef(iter->mFrame.GetImage()), 1,
-            iter->mFrame.GetIntrinsicSize(), iter->mFrame.GetPrincipalHandle(),
-            iter->mFrame.GetForceBlack(), iter->mTimeStamp);
+            do_AddRef(iter->mFrame.GetImage()), iter->mFrame.GetIntrinsicSize(),
+            iter->mFrame.GetPrincipalHandle(), iter->mFrame.GetForceBlack(),
+            iter->mTimeStamp);
       }
     }
   }
 
   MediaSegment& bufferedData = isAudio
                                    ? static_cast<MediaSegment&>(bufferedAudio)
                                    : static_cast<MediaSegment&>(bufferedVideo);
   if (bufferedData.GetDuration() != 0) {
--- a/dom/media/MediaStreamListener.cpp
+++ b/dom/media/MediaStreamListener.cpp
@@ -23,19 +23,20 @@ void DirectMediaStreamTrackListener::Mir
   aTo.AppendNullData(aFrom.GetDuration());
 }
 
 void DirectMediaStreamTrackListener::MirrorAndDisableSegment(
     VideoSegment& aFrom, VideoSegment& aTo, DisabledTrackMode aMode) {
   aTo.Clear();
   if (aMode == DisabledTrackMode::SILENCE_BLACK) {
     for (VideoSegment::ChunkIterator it(aFrom); !it.IsEnded(); it.Next()) {
-      aTo.AppendFrame(do_AddRef(it->mFrame.GetImage()), it->GetDuration(),
+      aTo.AppendFrame(do_AddRef(it->mFrame.GetImage()),
                       it->mFrame.GetIntrinsicSize(), it->GetPrincipalHandle(),
                       true);
+      aTo.ExtendLastFrameBy(it->GetDuration());
     }
   } else if (aMode == DisabledTrackMode::SILENCE_FREEZE) {
     aTo.AppendNullData(aFrom.GetDuration());
   }
 }
 
 void DirectMediaStreamTrackListener::
     NotifyRealtimeTrackDataAndApplyTrackDisabling(MediaStreamGraph* aGraph,
--- a/dom/media/VideoSegment.cpp
+++ b/dom/media/VideoSegment.cpp
@@ -81,21 +81,20 @@ already_AddRefed<Image> VideoFrame::Crea
   if (!image->CopyData(data)) {
     return nullptr;
   }
 
   return image.forget();
 }
 
 void VideoSegment::AppendFrame(already_AddRefed<Image>&& aImage,
-                               StreamTime aDuration,
                                const IntSize& aIntrinsicSize,
                                const PrincipalHandle& aPrincipalHandle,
                                bool aForceBlack, TimeStamp aTimeStamp) {
-  VideoChunk* chunk = AppendChunk(aDuration);
+  VideoChunk* chunk = AppendChunk(0);
   chunk->mTimeStamp = aTimeStamp;
   VideoFrame frame(aImage, aIntrinsicSize);
   MOZ_ASSERT_IF(!IsNull(), !aTimeStamp.IsNull());
   frame.SetForceBlack(aForceBlack);
   frame.SetPrincipalHandle(aPrincipalHandle);
   chunk->mFrame.TakeFrom(&frame);
 }
 
--- a/dom/media/VideoSegment.h
+++ b/dom/media/VideoSegment.h
@@ -103,17 +103,17 @@ class VideoSegment : public MediaSegment
   VideoSegment();
   VideoSegment(VideoSegment&& aSegment);
 
   VideoSegment(const VideoSegment&) = delete;
   VideoSegment& operator=(const VideoSegment&) = delete;
 
   ~VideoSegment();
 
-  void AppendFrame(already_AddRefed<Image>&& aImage, StreamTime aDuration,
+  void AppendFrame(already_AddRefed<Image>&& aImage,
                    const IntSize& aIntrinsicSize,
                    const PrincipalHandle& aPrincipalHandle,
                    bool aForceBlack = false,
                    TimeStamp aTimeStamp = TimeStamp::Now());
   void ExtendLastFrameBy(StreamTime aDuration) {
     if (aDuration <= 0) {
       return;
     }
--- a/dom/media/VideoStreamTrack.cpp
+++ b/dom/media/VideoStreamTrack.cpp
@@ -28,17 +28,17 @@ class VideoOutput : public DirectMediaSt
     for (VideoSegment::ConstChunkIterator i(video); !i.IsEnded(); i.Next()) {
       if (!mLastFrameTime.IsNull() && i->mTimeStamp < mLastFrameTime) {
         // Time can go backwards if the source is a captured MediaDecoder and
         // it seeks, as the previously buffered frames would stretch into the
         // future. If this happens, we clear the buffered frames and start over.
         mSegment.Clear();
       }
       const VideoFrame& f = i->mFrame;
-      mSegment.AppendFrame(do_AddRef(f.GetImage()), 0, f.GetIntrinsicSize(),
+      mSegment.AppendFrame(do_AddRef(f.GetImage()), f.GetIntrinsicSize(),
                            f.GetPrincipalHandle(), f.GetForceBlack(),
                            i->mTimeStamp);
       mLastFrameTime = i->mTimeStamp;
     }
     mVideoFrameContainer->SetCurrentFrames(mSegment);
   }
   void NotifyRemoved() override {
     mSegment.Clear();
--- a/dom/media/encoder/MediaEncoder.cpp
+++ b/dom/media/encoder/MediaEncoder.cpp
@@ -230,17 +230,17 @@ class MediaEncoder::VideoTrackListener :
     if (mShutdown) {
       return;
     }
 
     const VideoSegment& video = static_cast<const VideoSegment&>(aMedia);
     VideoSegment copy;
     for (VideoSegment::ConstChunkIterator iter(video); !iter.IsEnded();
          iter.Next()) {
-      copy.AppendFrame(do_AddRef(iter->mFrame.GetImage()), 1,
+      copy.AppendFrame(do_AddRef(iter->mFrame.GetImage()),
                        iter->mFrame.GetIntrinsicSize(),
                        iter->mFrame.GetPrincipalHandle(),
                        iter->mFrame.GetForceBlack(), iter->mTimeStamp);
     }
 
     nsresult rv = mEncoderThread->Dispatch(
         NewRunnableMethod<StoreCopyPassByRRef<VideoSegment>>(
             "mozilla::VideoTrackEncoder::AppendVideoSegment", mEncoder,
--- a/dom/media/encoder/TrackEncoder.cpp
+++ b/dom/media/encoder/TrackEncoder.cpp
@@ -477,19 +477,20 @@ void VideoTrackEncoder::NotifyEndOfStrea
       TRACK_LOG(LogLevel::Debug,
                 ("[VideoTrackEncoder %p]: Appending last video frame %p at pos "
                  "%.3fs, "
                  "track-end=%.3fs",
                  this, lastImage.get(),
                  (mLastChunk.mTimeStamp - mStartTime).ToSeconds(),
                  absoluteEndTime.ToSeconds()));
       mOutgoingBuffer.AppendFrame(
-          lastImage.forget(), duration.value(),
-          mLastChunk.mFrame.GetIntrinsicSize(), PRINCIPAL_HANDLE_NONE,
-          mLastChunk.mFrame.GetForceBlack(), mLastChunk.mTimeStamp);
+          lastImage.forget(), mLastChunk.mFrame.GetIntrinsicSize(),
+          PRINCIPAL_HANDLE_NONE, mLastChunk.mFrame.GetForceBlack(),
+          mLastChunk.mTimeStamp);
+      mOutgoingBuffer.ExtendLastFrameBy(duration.value());
     }
   }
 
   mIncomingBuffer.Clear();
   mLastChunk.SetNull(0);
 
   if (mInitialized && !mCanceled) {
     OnDataAvailable();
@@ -539,17 +540,17 @@ void VideoTrackEncoder::AdvanceCurrentTi
   VideoSegment tempSegment;
   {
     VideoChunk* previousChunk = &mLastChunk;
     auto appendDupes = [&](const TimeStamp& aUpTo) {
       while ((aUpTo - previousChunk->mTimeStamp).ToSeconds() > 1.0) {
         // We encode at least one frame per second, even if there are none
         // flowing.
         previousChunk->mTimeStamp += TimeDuration::FromSeconds(1.0);
-        tempSegment.AppendFrame(do_AddRef(previousChunk->mFrame.GetImage()), 1,
+        tempSegment.AppendFrame(do_AddRef(previousChunk->mFrame.GetImage()),
                                 previousChunk->mFrame.GetIntrinsicSize(),
                                 previousChunk->mFrame.GetPrincipalHandle(),
                                 previousChunk->mFrame.GetForceBlack(),
                                 previousChunk->mTimeStamp);
         TRACK_LOG(
             LogLevel::Verbose,
             ("[VideoTrackEncoder %p]: Duplicating video frame (%p) at pos %.3f",
              this, previousChunk->mFrame.GetImage(),
@@ -566,17 +567,17 @@ void VideoTrackEncoder::AdvanceCurrentTi
       }
       if (iter->mTimeStamp >= aTime) {
         // This frame starts in the future. Stop.
         break;
       }
       if (!previousChunk->IsNull()) {
         appendDupes(iter->mTimeStamp);
       }
-      tempSegment.AppendFrame(do_AddRef(iter->mFrame.GetImage()), 1,
+      tempSegment.AppendFrame(do_AddRef(iter->mFrame.GetImage()),
                               iter->mFrame.GetIntrinsicSize(),
                               iter->mFrame.GetPrincipalHandle(),
                               iter->mFrame.GetForceBlack(), iter->mTimeStamp);
       TRACK_LOG(LogLevel::Verbose,
                 ("[VideoTrackEncoder %p]: Taking video frame (%p) at pos %.3f",
                  this, iter->mFrame.GetImage(),
                  (iter->mTimeStamp - mStartTime).ToSeconds()));
       previousChunk = &*iter;
@@ -649,19 +650,20 @@ void VideoTrackEncoder::AdvanceCurrentTi
       TimeStamp t = mLastChunk.mTimeStamp;
       mLastChunk = *iter;
       mLastChunk.mTimeStamp = t;
       continue;
     }
 
     mEncodedTicks += duration.value();
     mOutgoingBuffer.AppendFrame(
-        do_AddRef(mLastChunk.mFrame.GetImage()), duration.value(),
+        do_AddRef(mLastChunk.mFrame.GetImage()),
         mLastChunk.mFrame.GetIntrinsicSize(), PRINCIPAL_HANDLE_NONE,
         mLastChunk.mFrame.GetForceBlack(), mLastChunk.mTimeStamp);
+    mOutgoingBuffer.ExtendLastFrameBy(duration.value());
     chunkAppended = true;
     mLastChunk = chunk;
   }
 
   if (chunkAppended) {
     Init(mOutgoingBuffer, mCurrentTime);
     if (mInitialized) {
       OnDataAvailable();
--- a/dom/media/gtest/TestVideoSegment.cpp
+++ b/dom/media/gtest/TestVideoSegment.cpp
@@ -12,34 +12,33 @@ namespace layer {
 class Image;
 }  // namespace layer
 }  // namespace mozilla
 
 TEST(VideoSegment, TestAppendFrameForceBlack) {
   RefPtr<layers::Image> testImage = nullptr;
 
   VideoSegment segment;
-  segment.AppendFrame(testImage.forget(), mozilla::StreamTime(90000),
-                      mozilla::gfx::IntSize(640, 480), PRINCIPAL_HANDLE_NONE,
-                      true);
+  segment.AppendFrame(testImage.forget(), mozilla::gfx::IntSize(640, 480),
+                      PRINCIPAL_HANDLE_NONE, true);
 
   VideoSegment::ChunkIterator iter(segment);
   while (!iter.IsEnded()) {
     VideoChunk chunk = *iter;
     EXPECT_TRUE(chunk.mFrame.GetForceBlack());
     iter.Next();
   }
 }
 
 TEST(VideoSegment, TestAppendFrameNotForceBlack) {
   RefPtr<layers::Image> testImage = nullptr;
 
   VideoSegment segment;
-  segment.AppendFrame(testImage.forget(), mozilla::StreamTime(90000),
-                      mozilla::gfx::IntSize(640, 480), PRINCIPAL_HANDLE_NONE);
+  segment.AppendFrame(testImage.forget(), mozilla::gfx::IntSize(640, 480),
+                      PRINCIPAL_HANDLE_NONE);
 
   VideoSegment::ChunkIterator iter(segment);
   while (!iter.IsEnded()) {
     VideoChunk chunk = *iter;
     EXPECT_FALSE(chunk.mFrame.GetForceBlack());
     iter.Next();
   }
 }
--- a/dom/media/gtest/TestVideoTrackEncoder.cpp
+++ b/dom/media/gtest/TestVideoTrackEncoder.cpp
@@ -276,17 +276,17 @@ TEST(VP8VideoTrackEncoder, FrameEncode) 
   images.AppendElement(generator.GenerateNV21Image());
 
   // Put generated YUV frame into video segment.
   // Duration of each frame is 1 second.
   VideoSegment segment;
   TimeStamp now = TimeStamp::Now();
   for (nsTArray<RefPtr<Image>>::size_type i = 0; i < images.Length(); i++) {
     RefPtr<Image> image = images[i];
-    segment.AppendFrame(image.forget(), 1, generator.GetSize(),
+    segment.AppendFrame(image.forget(), generator.GetSize(),
                         PRINCIPAL_HANDLE_NONE, false,
                         now + TimeDuration::FromSeconds(i));
   }
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(images.Length()));
 
@@ -299,17 +299,17 @@ TEST(VP8VideoTrackEncoder, FrameEncode) 
 TEST(VP8VideoTrackEncoder, SingleFrameEncode) {
   TestVP8TrackEncoder encoder;
 
   // Pass a half-second frame to the encoder.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   VideoSegment segment;
   TimeStamp now = TimeStamp::Now();
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
@@ -335,17 +335,17 @@ TEST(VP8VideoTrackEncoder, SameFrameEnco
 
   // Pass 15 100ms frames to the encoder.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   RefPtr<Image> image = generator.GenerateI420Image();
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
   for (uint32_t i = 0; i < 15; ++i) {
-    segment.AppendFrame(do_AddRef(image), 1, generator.GetSize(),
+    segment.AppendFrame(do_AddRef(image), generator.GetSize(),
                         PRINCIPAL_HANDLE_NONE, false,
                         now + TimeDuration::FromSeconds(i * 0.1));
   }
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
   encoder.NotifyEndOfStream();
@@ -370,17 +370,17 @@ TEST(VP8VideoTrackEncoder, SkippedFrames
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
 
   // Pass 100 frames of the shortest possible duration where we don't get
   // rounding errors between input/output rate.
   for (uint32_t i = 0; i < 100; ++i) {
-    segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+    segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                         PRINCIPAL_HANDLE_NONE, false,
                         now + TimeDuration::FromMilliseconds(i));
   }
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(100));
   encoder.NotifyEndOfStream();
@@ -406,23 +406,23 @@ TEST(VP8VideoTrackEncoder, RoundingError
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
 
   // Pass nine frames with timestamps not expressable in 90kHz sample rate,
   // then one frame to make the total duration one second.
   uint32_t usPerFrame = 99999;  // 99.999ms
   for (uint32_t i = 0; i < 9; ++i) {
-    segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+    segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                         PRINCIPAL_HANDLE_NONE, false,
                         now + TimeDuration::FromMicroseconds(i * usPerFrame));
   }
 
   // This last frame has timestamp start + 0.9s and duration 0.1s.
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.9));
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
   encoder.NotifyEndOfStream();
 
@@ -443,22 +443,22 @@ TEST(VP8VideoTrackEncoder, RoundingError
 // Test that we're encoding timestamps rather than durations.
 TEST(VP8VideoTrackEncoder, TimestampFrameEncode) {
   TestVP8TrackEncoder encoder;
 
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.05));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.2));
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
   encoder.NotifyEndOfStream();
 
@@ -492,22 +492,22 @@ TEST(VP8VideoTrackEncoder, DriftingFrame
 
   // Set up major drift -- audio that goes twice as fast as video.
   // This should make the given video durations double as they get encoded.
   EXPECT_CALL(*encoder.DriftCompensator(), GetVideoTime(_, _))
       .WillRepeatedly(Invoke(
           [&](TimeStamp, TimeStamp aTime) { return now + (aTime - now) * 2; }));
 
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.05));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.2));
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
   encoder.NotifyEndOfStream();
 
@@ -536,34 +536,34 @@ TEST(VP8VideoTrackEncoder, Suspended) {
   TestVP8TrackEncoder encoder;
 
   // Pass 3 frames with duration 0.1s. We suspend before and resume after the
   // second frame.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.1));
 
   encoder.Suspend(now + TimeDuration::FromSeconds(0.1));
 
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.1));
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.2));
 
   encoder.Resume(now + TimeDuration::FromSeconds(0.2));
 
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.2));
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
 
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
@@ -587,26 +587,26 @@ TEST(VP8VideoTrackEncoder, Suspended) {
 TEST(VP8VideoTrackEncoder, SuspendedUntilEnd) {
   TestVP8TrackEncoder encoder;
 
   // Pass 2 frames with duration 0.1s. We suspend before the second frame.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.1));
 
   encoder.Suspend(now + TimeDuration::FromSeconds(0.1));
 
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.1));
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.2));
 
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
@@ -634,17 +634,17 @@ TEST(VP8VideoTrackEncoder, AlwaysSuspend
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
 
   TimeStamp now = TimeStamp::Now();
 
   encoder.Suspend(now);
 
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2));
 
   encoder.NotifyEndOfStream();
 
@@ -664,26 +664,26 @@ TEST(VP8VideoTrackEncoder, SuspendedBegi
   TimeStamp now = TimeStamp::Now();
 
   // Suspend and pass a frame with duration 0.5s. Then resume and pass one more.
   encoder.Suspend(now);
 
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
 
   encoder.Resume(now + TimeDuration::FromSeconds(0.5));
 
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(0.5));
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
 
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
@@ -708,27 +708,27 @@ TEST(VP8VideoTrackEncoder, SuspendedBegi
 TEST(VP8VideoTrackEncoder, SuspendedOverlap) {
   TestVP8TrackEncoder encoder;
 
   // Pass a 1s frame and suspend after 0.5s.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
 
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
   encoder.Suspend(now + TimeDuration::FromSeconds(0.5));
 
   // Pass another 1s frame and resume after 0.3 of this new frame.
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromSeconds(1));
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.3));
   encoder.Resume(now + TimeDuration::FromSeconds(1.3));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2));
 
   encoder.NotifyEndOfStream();
@@ -754,17 +754,17 @@ TEST(VP8VideoTrackEncoder, SuspendedOver
 TEST(VP8VideoTrackEncoder, PrematureEnding) {
   TestVP8TrackEncoder encoder;
 
   // Pass a 1s frame and end the track after 0.5s.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
@@ -785,17 +785,17 @@ TEST(VP8VideoTrackEncoder, DelayedStart)
   TestVP8TrackEncoder encoder;
 
   // Pass a 2s frame, start (pass first CurrentTime) at 0.5s, end at 1s.
   // Should result in a 0.5s encoding.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now + TimeDuration::FromSeconds(0.5));
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
@@ -817,17 +817,17 @@ TEST(VP8VideoTrackEncoder, DelayedStartO
   TestVP8TrackEncoder encoder;
 
   // Pass a 2s frame, start (pass first CurrentTime) at 0.5s, end at 1s.
   // Should result in a 0.5s encoding.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.AppendVideoSegment(std::move(segment));
   encoder.SetStartOffset(now + TimeDuration::FromSeconds(0.5));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
@@ -848,17 +848,17 @@ TEST(VP8VideoTrackEncoder, VeryDelayedSt
   TestVP8TrackEncoder encoder;
 
   // Pass a 1s frame, start (pass first CurrentTime) at 10s, end at 10.5s.
   // Should result in a 0.5s encoding.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now + TimeDuration::FromSeconds(10));
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(10.5));
   encoder.NotifyEndOfStream();
 
   EncodedFrameContainer container;
@@ -882,17 +882,17 @@ TEST(VP8VideoTrackEncoder, LongFramesReE
   // Pass a frame at t=0 and start encoding.
   // Advancing the current time by 1.5s should encode a 1s frame.
   // Advancing the current time by another 9.5s should encode another 10 1s
   // frames.
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
 
   {
     encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
 
@@ -934,31 +934,31 @@ TEST(VP8VideoTrackEncoder, ShortKeyFrame
 
   // Give the encoder a keyframe interval of 500ms.
   // Pass frames at 0, 400ms, 600ms, 750ms, 900ms, 1100ms
   // Expected keys: ^         ^^^^^                ^^^^^^
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(400));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(600));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(750));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(900));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(1100));
 
   encoder.SetKeyFrameInterval(500);
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.2));
   encoder.NotifyEndOfStream();
@@ -1003,31 +1003,31 @@ TEST(VP8VideoTrackEncoder, LongKeyFrameI
 
   // Give the encoder a keyframe interval of 2000ms.
   // Pass frames at 0, 600ms, 900ms, 1100ms, 1900ms, 2100ms
   // Expected keys: ^                ^^^^^^          ^^^^^^
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(600));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(900));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(1100));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(1900));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(2100));
 
   encoder.SetKeyFrameInterval(2000);
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
   encoder.NotifyEndOfStream();
@@ -1071,31 +1071,31 @@ TEST(VP8VideoTrackEncoder, DefaultKeyFra
   TestVP8TrackEncoder encoder;
 
   // Pass frames at 0, 600ms, 900ms, 1100ms, 1900ms, 2100ms
   // Expected keys: ^                ^^^^^^          ^^^^^^
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(600));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(900));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(1100));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(1900));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(2100));
 
   encoder.SetStartOffset(now);
   encoder.AppendVideoSegment(std::move(segment));
   encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
   encoder.NotifyEndOfStream();
 
@@ -1148,79 +1148,79 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFra
   // Then decrease keyframe interval to 200ms.
   // Pass frames at 2500ms, 2600ms, 2800ms, 2900ms
   // Expected keys:         ^^^^^^  ^^^^^^
   YUVBufferGenerator generator;
   generator.Init(mozilla::gfx::IntSize(640, 480));
   EncodedFrameContainer container;
   TimeStamp now = TimeStamp::Now();
   VideoSegment segment;
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false, now);
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(100));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(120));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(130));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(200));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(300));
 
   // The underlying encoder only gets passed frame N when frame N+1 is known,
   // so we pass in the next frame *before* the keyframe interval change.
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(500));
 
   encoder.SetStartOffset(now);
   encoder.SetKeyFrameInterval(100);
   encoder.AppendVideoSegment(std::move(segment));
 
   // Advancing 501ms, so the first bit of the frame starting at 500ms is
   // included.
   encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(501));
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(1300));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(1400));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(2400));
 
   // The underlying encoder only gets passed frame N when frame N+1 is known,
   // so we pass in the next frame *before* the keyframe interval change.
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(2500));
 
   encoder.SetKeyFrameInterval(1100);
   encoder.AppendVideoSegment(std::move(segment));
 
   // Advancing 2000ms from 501ms to 2501ms
   encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(2501));
   ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
 
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(2600));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(2800));
-  segment.AppendFrame(generator.GenerateI420Image(), 1, generator.GetSize(),
+  segment.AppendFrame(generator.GenerateI420Image(), generator.GetSize(),
                       PRINCIPAL_HANDLE_NONE, false,
                       now + TimeDuration::FromMilliseconds(2900));
 
   encoder.SetKeyFrameInterval(200);
   encoder.AppendVideoSegment(std::move(segment));
 
   // Advancing 499ms (compensating back 1ms from the first advancement)
   // from 2501ms to 3000ms.
--- a/dom/media/mediasink/DecodedStream.cpp
+++ b/dom/media/mediasink/DecodedStream.cpp
@@ -591,19 +591,22 @@ static void WriteVideoToMediaStream(Medi
                                     const mozilla::gfx::IntSize& aIntrinsicSize,
                                     const TimeStamp& aTimeStamp,
                                     VideoSegment* aOutput,
                                     const PrincipalHandle& aPrincipalHandle) {
   RefPtr<layers::Image> image = aImage;
   auto end = aStream->MicrosecondsToStreamTimeRoundDown(aEnd.ToMicroseconds());
   auto start =
       aStream->MicrosecondsToStreamTimeRoundDown(aStart.ToMicroseconds());
-  StreamTime duration = end - start;
-  aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize,
-                       aPrincipalHandle, false, aTimeStamp);
+  aOutput->AppendFrame(image.forget(), aIntrinsicSize, aPrincipalHandle, false,
+                       aTimeStamp);
+  // Extend this so we get accurate durations for all frames.
+  // Because this track is pushed, we need durations so the graph can track
+  // when playout of the track has finished.
+  aOutput->ExtendLastFrameBy(end - start);
 }
 
 static bool ZeroDurationAtLastChunk(VideoSegment& aInput) {
   // Get the last video frame's start time in VideoSegment aInput.
   // If the start time is equal to the duration of aInput, means the last video
   // frame's duration is zero.
   StreamTime lastVideoStratTime;
   aInput.GetLastFrame(&lastVideoStratTime);
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -318,17 +318,17 @@ void MediaEngineDefaultVideoSource::Gene
   // SetData copies data, so we can free the frame
   ReleaseFrame(data);
 
   if (!setData) {
     return;
   }
 
   VideoSegment segment;
-  segment.AppendFrame(ycbcr_image.forget(), 1,
+  segment.AppendFrame(ycbcr_image.forget(),
                       gfx::IntSize(mOpts.mWidth, mOpts.mHeight),
                       mPrincipalHandle);
   ;
   mStream->AppendToTrack(mTrackID, &segment);
 }
 
 void MediaEngineDefaultVideoSource::Pull(
     const RefPtr<const AllocationHandle>& aHandle,
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -633,17 +633,17 @@ int MediaEngineRemoteVideoSource::Delive
         }));
   }
 
   {
     MutexAutoLock lock(mMutex);
     MOZ_ASSERT(mState == kStarted);
     VideoSegment segment;
     mImageSize = image->GetSize();
-    segment.AppendFrame(image.forget(), 1, mImageSize, mPrincipal);
+    segment.AppendFrame(image.forget(), mImageSize, mPrincipal);
     mStream->AppendToTrack(mTrackID, &segment);
   }
 
   return 0;
 }
 
 uint32_t MediaEngineRemoteVideoSource::GetDistance(
     const webrtc::CaptureCapability& aCandidate,
--- a/dom/media/webrtc/MediaEngineTabVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.cpp
@@ -379,17 +379,17 @@ void MediaEngineTabVideoSource::Draw() {
       NS_ENSURE_SUCCESS_VOID(
           presShell->RenderDocument(r, renderDocFlags, bgColor, context));
     } else {
       dt->ClearRect(Rect(0, 0, size.width, size.height));
     }
   }
 
   VideoSegment segment;
-  segment.AppendFrame(do_AddRef(rgbImage), 1, size, mPrincipalHandle);
+  segment.AppendFrame(do_AddRef(rgbImage), size, mPrincipalHandle);
   // This can fail if either a) we haven't added the track yet, or b)
   // we've removed or ended the track.
   mStreamMain->AppendToTrack(mTrackIDMain, &segment);
 }
 
 nsresult MediaEngineTabVideoSource::FocusOnSelectedSource(
     const RefPtr<const AllocationHandle>& aHandle) {
   return NS_ERROR_NOT_IMPLEMENTED;
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1777,17 +1777,17 @@ class MediaPipelineReceiveVideo::Pipelin
         return;
       }
 
       image = yuvImage.forget();
     }
 
     VideoSegment segment;
     auto size = image->GetSize();
-    segment.AppendFrame(image.forget(), 1, size, mPrincipalHandle);
+    segment.AppendFrame(image.forget(), size, mPrincipalHandle);
     mSource->AppendToTrack(mTrackId, &segment);
   }
 
  private:
   RefPtr<layers::ImageContainer> mImageContainer;
 };
 
 class MediaPipelineReceiveVideo::PipelineRenderer