Bug 1109644. Track data-produced-so-far in the MediaEngineSources themselves, so if we push data into the MediaStream for any reason we can account for it. r=jesup
☠☠ backed out by b62d82398258 ☠☠
author: Robert O'Callahan <robert@ocallahan.org>
Wed, 17 Dec 2014 06:53:17 -0500
changeset 220187 0661a1b7ec1466e1722e179521d865efc81f7ad9
parent 220186 23c386e2acdc8414a900604b3f484f9d6ed7434f
child 220188 a82581055c06f209bc9b7e21da64e519e1fab29f
push id: unknown
push user: unknown
push date: unknown
reviewers: jesup
bugs: 1109644
milestone: 37.0a1
Bug 1109644. Track data-produced-so-far in the MediaEngineSources themselves, so if we push data into the MediaStream for any reason we can account for it. r=jesup
dom/media/MediaManager.h
dom/media/webrtc/MediaEngine.h
dom/media/webrtc/MediaEngineCameraVideoSource.cpp
dom/media/webrtc/MediaEngineCameraVideoSource.h
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineDefault.h
dom/media/webrtc/MediaEngineGonkVideoSource.cpp
dom/media/webrtc/MediaEngineGonkVideoSource.h
dom/media/webrtc/MediaEngineTabVideoSource.cpp
dom/media/webrtc/MediaEngineTabVideoSource.h
dom/media/webrtc/MediaEngineWebRTC.h
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
dom/media/webrtc/MediaEngineWebRTCVideo.cpp
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -84,18 +84,16 @@ public:
   void Activate(already_AddRefed<SourceMediaStream> aStream,
     MediaEngineSource* aAudioSource,
     MediaEngineSource* aVideoSource)
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
     mStream = aStream;
     mAudioSource = aAudioSource;
     mVideoSource = aVideoSource;
-    mLastEndTimeAudio = 0;
-    mLastEndTimeVideo = 0;
 
     mStream->AddListener(this);
   }
 
   MediaStream *Stream() // Can be used to test if Activate was called
   {
     return mStream;
   }
@@ -182,20 +180,20 @@ public:
 
   // Proxy NotifyPull() to sources
   virtual void
   NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) MOZ_OVERRIDE
   {
     // Currently audio sources ignore NotifyPull, but they could
     // watch it especially for fake audio.
     if (mAudioSource) {
-      mAudioSource->NotifyPull(aGraph, mStream, kAudioTrack, aDesiredTime, mLastEndTimeAudio);
+      mAudioSource->NotifyPull(aGraph, mStream, kAudioTrack, aDesiredTime);
     }
     if (mVideoSource) {
-      mVideoSource->NotifyPull(aGraph, mStream, kVideoTrack, aDesiredTime, mLastEndTimeVideo);
+      mVideoSource->NotifyPull(aGraph, mStream, kVideoTrack, aDesiredTime);
     }
   }
 
   virtual void
   NotifyEvent(MediaStreamGraph* aGraph,
               MediaStreamListener::MediaStreamGraphEvent aEvent) MOZ_OVERRIDE
   {
     switch (aEvent) {
@@ -234,18 +232,16 @@ private:
 
   // Set at Activate on MainThread
 
   // Accessed from MediaStreamGraph thread, MediaManager thread, and MainThread
   // No locking needed as they're only addrefed except on the MediaManager thread
   nsRefPtr<MediaEngineSource> mAudioSource; // threadsafe refcnt
   nsRefPtr<MediaEngineSource> mVideoSource; // threadsafe refcnt
   nsRefPtr<SourceMediaStream> mStream; // threadsafe refcnt
-  StreamTime mLastEndTimeAudio;
-  StreamTime mLastEndTimeVideo;
   bool mFinished;
 
   // Accessed from MainThread and MSG thread
   Mutex mLock; // protects mRemoved access from MainThread
   bool mRemoved;
 };
 
 class GetUserMediaNotificationEvent: public nsRunnable
--- a/dom/media/webrtc/MediaEngine.h
+++ b/dom/media/webrtc/MediaEngine.h
@@ -105,18 +105,17 @@ public:
 
   /* tell the source if there are any direct listeners attached */
   virtual void SetDirectListeners(bool) = 0;
 
   /* Called when the stream wants more data */
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream *aSource,
                           TrackID aId,
-                          StreamTime aDesiredTime,
-                          StreamTime &aLastEndTime) = 0;
+                          StreamTime aDesiredTime) = 0;
 
   /* Stop the device and release the corresponding MediaStream */
   virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;
 
   /* Change device configuration.  */
   virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                           bool aAgcOn, uint32_t aAGC,
                           bool aNoiseOn, uint32_t aNoise,
--- a/dom/media/webrtc/MediaEngineCameraVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineCameraVideoSource.cpp
@@ -56,16 +56,18 @@ bool MediaEngineCameraVideoSource::Appen
 {
   MOZ_ASSERT(aSource);
 
   VideoSegment segment;
   nsRefPtr<layers::Image> image = aImage;
   IntSize size(image ? mWidth : 0, image ? mHeight : 0);
   segment.AppendFrame(image.forget(), delta, size);
 
+  mProducedDuration += delta;
+
   // This is safe from any thread, and is safe if the track is Finished
   // or Destroyed.
   // This can fail if either a) we haven't added the track yet, or b)
   // we've removed or finished the track.
   return aSource->AppendToTrack(aID, &(segment));
 }
 
 // A special version of the algorithm for cameras that don't list capabilities.
--- a/dom/media/webrtc/MediaEngineCameraVideoSource.h
+++ b/dom/media/webrtc/MediaEngineCameraVideoSource.h
@@ -20,16 +20,17 @@ class MediaEngineCameraVideoSource : pub
 {
 public:
   explicit MediaEngineCameraVideoSource(int aIndex,
                                         const char* aMonitorName = "Camera.Monitor")
     : MediaEngineVideoSource(kReleased)
     , mMonitor(aMonitorName)
     , mWidth(0)
     , mHeight(0)
+    , mProducedDuration(0)
     , mInitDone(false)
     , mHasDirectListeners(false)
     , mCaptureIndex(aIndex)
     , mTrackID(0)
     , mFps(-1)
   {}
 
 
@@ -86,16 +87,18 @@ protected:
   Monitor mMonitor; // Monitor for processing Camera frames.
   nsRefPtr<layers::Image> mImage;
   nsRefPtr<layers::ImageContainer> mImageContainer;
   int mWidth, mHeight; // protected with mMonitor on Gonk due to different threading
   // end of data protected by mMonitor
 
   nsTArray<SourceMediaStream*> mSources; // When this goes empty, we shut down HW
 
+  StreamTime mProducedDuration;
+
   bool mInitDone;
   bool mHasDirectListeners;
   int mCaptureIndex;
   TrackID mTrackID;
   int mFps; // Track rate (30 fps by default)
 
   webrtc::CaptureCapability mCapability; // Doesn't work on OS X.
 
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -42,16 +42,17 @@ NS_IMPL_ISUPPORTS(MediaEngineDefaultVide
  * Default video source.
  */
 
 MediaEngineDefaultVideoSource::MediaEngineDefaultVideoSource()
   : MediaEngineVideoSource(kReleased)
   , mTimer(nullptr)
   , mMonitor("Fake video")
   , mCb(16), mCr(16)
+  , mProducedDuration(0)
 {
   mImageContainer = layers::LayerManager::CreateImageContainer();
 }
 
 MediaEngineDefaultVideoSource::~MediaEngineDefaultVideoSource()
 {}
 
 void
@@ -239,38 +240,37 @@ MediaEngineDefaultVideoSource::Notify(ns
 
   return NS_OK;
 }
 
 void
 MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                           SourceMediaStream *aSource,
                                           TrackID aID,
-                                          StreamTime aDesiredTime,
-                                          StreamTime &aLastEndTime)
+                                          StreamTime aDesiredTime)
 {
   // AddTrack takes ownership of segment
   VideoSegment segment;
   MonitorAutoLock lock(mMonitor);
   if (mState != kStarted) {
     return;
   }
 
   // Note: we're not giving up mImage here
   nsRefPtr<layers::Image> image = mImage;
-  StreamTime delta = aDesiredTime - aLastEndTime;
+  StreamTime delta = aDesiredTime - mProducedDuration;
 
   if (delta > 0) {
     // nullptr images are allowed
     IntSize size(image ? mOpts.mWidth : 0, image ? mOpts.mHeight : 0);
     segment.AppendFrame(image.forget(), delta, size);
     // This can fail if either a) we haven't added the track yet, or b)
     // we've removed or finished the track.
     if (aSource->AppendToTrack(aID, &segment)) {
-      aLastEndTime = aDesiredTime;
+      mProducedDuration = aDesiredTime;
     }
     // Generate null data for fake tracks.
     if (mHasFakeTracks) {
       for (int i = 0; i < kFakeVideoTrackCount; ++i) {
         VideoSegment nullSegment;
         nullSegment.AppendNullData(delta);
         aSource->AppendToTrack(kTrackCount + i, &nullSegment);
       }
--- a/dom/media/webrtc/MediaEngineDefault.h
+++ b/dom/media/webrtc/MediaEngineDefault.h
@@ -48,18 +48,17 @@ public:
   virtual void SetDirectListeners(bool aHasDirectListeners) {};
   virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                           bool aAgcOn, uint32_t aAGC,
                           bool aNoiseOn, uint32_t aNoise,
                           int32_t aPlayoutDelay) { return NS_OK; };
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream *aSource,
                           TrackID aId,
-                          StreamTime aDesiredTime,
-                          StreamTime &aLastEndTime);
+                          StreamTime aDesiredTime) MOZ_OVERRIDE;
   virtual bool SatisfiesConstraintSets(
       const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets)
   {
     return true;
   }
 
   virtual bool IsFake() {
     return true;
@@ -91,16 +90,18 @@ protected:
   Monitor mMonitor;
   nsRefPtr<layers::Image> mImage;
 
   nsRefPtr<layers::ImageContainer> mImageContainer;
 
   MediaEnginePrefs mOpts;
   int mCb;
   int mCr;
+
+  StreamTime mProducedDuration;
 };
 
 class SineWaveGenerator;
 
 class MediaEngineDefaultAudioSource : public nsITimerCallback,
                                       public MediaEngineAudioSource
 {
 public:
@@ -117,18 +118,17 @@ public:
   virtual void SetDirectListeners(bool aHasDirectListeners) {};
   virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                           bool aAgcOn, uint32_t aAGC,
                           bool aNoiseOn, uint32_t aNoise,
                           int32_t aPlayoutDelay) { return NS_OK; };
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream *aSource,
                           TrackID aId,
-                          StreamTime aDesiredTime,
-                          StreamTime &aLastEndTime) {}
+                          StreamTime aDesiredTime) {}
 
   virtual bool IsFake() {
     return true;
   }
 
   virtual const MediaSourceType GetMediaSource() {
     return MediaSourceType::Microphone;
   }
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
@@ -40,29 +40,28 @@ NS_IMPL_RELEASE_INHERITED(MediaEngineGon
 
 // Called if the graph thinks it's running out of buffered video; repeat
 // the last frame for whatever minimum period it think it needs. Note that
 // this means that no *real* frame can be inserted during this period.
 void
 MediaEngineGonkVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                        SourceMediaStream* aSource,
                                        TrackID aID,
-                                       StreamTime aDesiredTime,
-                                       StreamTime& aLastEndTime)
+                                       StreamTime aDesiredTime)
 {
   VideoSegment segment;
 
   MonitorAutoLock lock(mMonitor);
   // B2G does AddTrack, but holds kStarted until the hardware changes state.
   // So mState could be kReleased here. We really don't care about the state,
   // though.
 
   // Note: we're not giving up mImage here
   nsRefPtr<layers::Image> image = mImage;
-  StreamTime delta = aDesiredTime - aLastEndTime;
+  StreamTime delta = aDesiredTime - mProducedDuration;
   LOGFRAME(("NotifyPull, desired = %ld, delta = %ld %s", (int64_t) aDesiredTime,
             (int64_t) delta, image ? "" : "<null>"));
 
   // Bug 846188 We may want to limit incoming frames to the requested frame rate
   // mFps - if you want 30FPS, and the camera gives you 60FPS, this could
   // cause issues.
   // We may want to signal if the actual frame rate is below mMinFPS -
   // cameras often don't return the requested frame rate especially in low
@@ -73,17 +72,17 @@ MediaEngineGonkVideoSource::NotifyPull(M
   // Doing so means a negative delta and thus messes up handling of the graph
   if (delta > 0) {
     // nullptr images are allowed
     IntSize size(image ? mWidth : 0, image ? mHeight : 0);
     segment.AppendFrame(image.forget(), delta, size);
     // This can fail if either a) we haven't added the track yet, or b)
     // we've removed or finished the track.
     if (aSource->AppendToTrack(aID, &(segment))) {
-      aLastEndTime = aDesiredTime;
+      mProducedDuration = aDesiredTime;
     }
   }
 }
 
 void
 MediaEngineGonkVideoSource::ChooseCapability(const VideoTrackConstraintsN& aConstraints,
                                              const MediaEnginePrefs& aPrefs)
 {
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.h
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.h
@@ -43,33 +43,34 @@ class MediaEngineGonkVideoSource : publi
 {
 public:
   NS_DECL_ISUPPORTS_INHERITED
 
   MediaEngineGonkVideoSource(int aIndex)
     : MediaEngineCameraVideoSource(aIndex, "GonkCamera.Monitor")
     , mCallbackMonitor("GonkCamera.CallbackMonitor")
     , mCameraControl(nullptr)
+    , mProducedDuration(0)
     , mRotation(0)
     , mBackCamera(false)
-    , mOrientationChanged(true) // Correct the orientation at first time takePhoto.
+    , mOrientationChanged(true)
+    // Correct the orientation at first time takePhoto.
     {
       Init();
     }
 
   virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints,
                             const MediaEnginePrefs &aPrefs) MOZ_OVERRIDE;
   virtual nsresult Deallocate() MOZ_OVERRIDE;
   virtual nsresult Start(SourceMediaStream* aStream, TrackID aID) MOZ_OVERRIDE;
   virtual nsresult Stop(SourceMediaStream* aSource, TrackID aID) MOZ_OVERRIDE;
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream* aSource,
                           TrackID aId,
-                          StreamTime aDesiredTime,
-                          StreamTime &aLastEndTime) MOZ_OVERRIDE;
+                          StreamTime aDesiredTime) MOZ_OVERRIDE;
   virtual bool SatisfiesConstraintSets(
       const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets)
   {
     return true;
   }
 
   void OnHardwareStateChange(HardwareState aState, nsresult aReason) MOZ_OVERRIDE;
   void GetRotation();
@@ -101,16 +102,17 @@ protected:
   void Shutdown();
   void ChooseCapability(const VideoTrackConstraintsN& aConstraints,
                         const MediaEnginePrefs& aPrefs);
 
   mozilla::ReentrantMonitor mCallbackMonitor; // Monitor for camera callback handling
   // This is only modified on MainThread (AllocImpl and DeallocImpl)
   nsRefPtr<ICameraControl> mCameraControl;
   nsCOMPtr<nsIDOMFile> mLastCapture;
+  StreamTime mProducedDuration;
 
   // These are protected by mMonitor in parent class
   nsTArray<nsRefPtr<PhotoCallback>> mPhotoCallbacks;
   int mRotation;
   int mCameraAngle; // See dom/base/ScreenOrientation.h
   bool mBackCamera;
   bool mOrientationChanged; // True when screen rotates.
 
--- a/dom/media/webrtc/MediaEngineTabVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.cpp
@@ -28,17 +28,18 @@
 namespace mozilla {
 
 using namespace mozilla::gfx;
 using dom::ConstrainLongRange;
 
 NS_IMPL_ISUPPORTS(MediaEngineTabVideoSource, nsIDOMEventListener, nsITimerCallback)
 
 MediaEngineTabVideoSource::MediaEngineTabVideoSource()
-: mMonitor("MediaEngineTabVideoSource"), mTabSource(nullptr)
+  : mProducedDuration(0)
+  , mMonitor("MediaEngineTabVideoSource")
 {
 }
 
 nsresult
 MediaEngineTabVideoSource::StartRunnable::Run()
 {
   mVideoSource->Draw();
   mVideoSource->mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
@@ -187,33 +188,32 @@ MediaEngineTabVideoSource::Start(SourceM
   aStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);
 
   return NS_OK;
 }
 
 void
 MediaEngineTabVideoSource::NotifyPull(MediaStreamGraph*,
                                       SourceMediaStream* aSource,
-                                      TrackID aID, StreamTime aDesiredTime,
-                                      StreamTime& aLastEndTime)
+                                      TrackID aID, StreamTime aDesiredTime)
 {
   VideoSegment segment;
   MonitorAutoLock mon(mMonitor);
 
   // Note: we're not giving up mImage here
   nsRefPtr<layers::CairoImage> image = mImage;
-  StreamTime delta = aDesiredTime - aLastEndTime;
+  StreamTime delta = aDesiredTime - mProducedDuration;
   if (delta > 0) {
     // nullptr images are allowed
     gfx::IntSize size = image ? image->GetSize() : IntSize(0, 0);
     segment.AppendFrame(image.forget().downcast<layers::Image>(), delta, size);
     // This can fail if either a) we haven't added the track yet, or b)
     // we've removed or finished the track.
     if (aSource->AppendToTrack(aID, &(segment))) {
-      aLastEndTime = aDesiredTime;
+      mProducedDuration = aDesiredTime;
     }
   }
 }
 
 void
 MediaEngineTabVideoSource::Draw() {
 
   IntSize size(mBufW, mBufH);
--- a/dom/media/webrtc/MediaEngineTabVideoSource.h
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.h
@@ -20,17 +20,17 @@ class MediaEngineTabVideoSource : public
 
     virtual void GetName(nsAString_internal&);
     virtual void GetUUID(nsAString_internal&);
     virtual nsresult Allocate(const VideoTrackConstraintsN &,
                               const mozilla::MediaEnginePrefs&);
     virtual nsresult Deallocate();
     virtual nsresult Start(mozilla::SourceMediaStream*, mozilla::TrackID);
     virtual void SetDirectListeners(bool aHasDirectListeners) {};
-    virtual void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime, mozilla::StreamTime&);
+    virtual void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime) MOZ_OVERRIDE;
     virtual nsresult Stop(mozilla::SourceMediaStream*, mozilla::TrackID);
     virtual nsresult Config(bool, uint32_t, bool, uint32_t, bool, uint32_t, int32_t);
     virtual bool IsFake();
     virtual const MediaSourceType GetMediaSource() {
       return MediaSourceType::Browser;
     }
     virtual bool SatisfiesConstraintSets(
       const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets)
@@ -74,12 +74,13 @@ private:
     int mBufH;
     int64_t mWindowId;
     bool mScrollWithPage;
     int mTimePerFrame;
     ScopedFreePtr<unsigned char> mData;
     nsCOMPtr<nsIDOMWindow> mWindow;
     nsRefPtr<layers::CairoImage> mImage;
     nsCOMPtr<nsITimer> mTimer;
+    StreamTime mProducedDuration;
     Monitor mMonitor;
     nsCOMPtr<nsITabSource> mTabSource;
   };
 }
--- a/dom/media/webrtc/MediaEngineWebRTC.h
+++ b/dom/media/webrtc/MediaEngineWebRTC.h
@@ -92,18 +92,17 @@ public:
   virtual nsresult Allocate(const VideoTrackConstraintsN& aConstraints,
                             const MediaEnginePrefs& aPrefs);
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream* aSource,
                           TrackID aId,
-                          StreamTime aDesiredTime,
-                          StreamTime &aLastEndTime);
+                          StreamTime aDesiredTime) MOZ_OVERRIDE;
 
   virtual const MediaSourceType GetMediaSource() {
     return mMediaSource;
   }
   virtual nsresult TakePhoto(PhotoCallback* aCallback)
   {
     return NS_ERROR_NOT_IMPLEMENTED;
   }
@@ -175,18 +174,17 @@ public:
   virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
                           bool aAgcOn, uint32_t aAGC,
                           bool aNoiseOn, uint32_t aNoise,
                           int32_t aPlayoutDelay);
 
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream* aSource,
                           TrackID aId,
-                          StreamTime aDesiredTime,
-                          StreamTime &aLastEndTime);
+                          StreamTime aDesiredTime) MOZ_OVERRIDE;
 
   virtual bool IsFake() {
     return false;
   }
 
   virtual const MediaSourceType GetMediaSource() {
     return MediaSourceType::Microphone;
   }
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -393,25 +393,21 @@ MediaEngineWebRTCAudioSource::Stop(Sourc
   }
   return NS_OK;
 }
 
 void
 MediaEngineWebRTCAudioSource::NotifyPull(MediaStreamGraph* aGraph,
                                          SourceMediaStream *aSource,
                                          TrackID aID,
-                                         StreamTime aDesiredTime,
-                                         StreamTime &aLastEndTime)
+                                         StreamTime aDesiredTime)
 {
   // Ignore - we push audio data
 #ifdef DEBUG
-  StreamTime delta = aDesiredTime - aLastEndTime;
-  LOG(("Audio: NotifyPull: aDesiredTime %ld, delta %ld",(int64_t) aDesiredTime,
-       (int64_t) delta));
-  aLastEndTime = aDesiredTime;
+  LOG(("Audio: NotifyPull: aDesiredTime %ld", aDesiredTime));
 #endif
 }
 
 void
 MediaEngineWebRTCAudioSource::Init()
 {
   mVoEBase = webrtc::VoEBase::GetInterface(mVoiceEngine);
 
--- a/dom/media/webrtc/MediaEngineWebRTCVideo.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCVideo.cpp
@@ -119,45 +119,42 @@ MediaEngineWebRTCVideoSource::DeliverFra
 
 // Called if the graph thinks it's running out of buffered video; repeat
 // the last frame for whatever minimum period it think it needs.  Note that
 // this means that no *real* frame can be inserted during this period.
 void
 MediaEngineWebRTCVideoSource::NotifyPull(MediaStreamGraph* aGraph,
                                          SourceMediaStream* aSource,
                                          TrackID aID,
-                                         StreamTime aDesiredTime,
-                                         StreamTime &aLastEndTime)
+                                         StreamTime aDesiredTime)
 {
   VideoSegment segment;
 
   MonitorAutoLock lock(mMonitor);
   // B2G does AddTrack, but holds kStarted until the hardware changes state.
   // So mState could be kReleased here.  We really don't care about the state,
   // though.
 
-  StreamTime delta = aDesiredTime - aLastEndTime;
+  StreamTime delta = aDesiredTime - mProducedDuration;
   LOGFRAME(("NotifyPull, desired = %ld, delta = %ld %s", (int64_t) aDesiredTime,
             (int64_t) delta, mImage.get() ? "" : "<null>"));
 
   // Bug 846188 We may want to limit incoming frames to the requested frame rate
   // mFps - if you want 30FPS, and the camera gives you 60FPS, this could
   // cause issues.
   // We may want to signal if the actual frame rate is below mMinFPS -
   // cameras often don't return the requested frame rate especially in low
   // light; we should consider surfacing this so that we can switch to a
   // lower resolution (which may up the frame rate)
 
   // Don't append if we've already provided a frame that supposedly goes past the current aDesiredTime
   // Doing so means a negative delta and thus messes up handling of the graph
   if (delta > 0) {
     // nullptr images are allowed
-    if (AppendToTrack(aSource, mImage, aID, delta)) {
-      aLastEndTime = aDesiredTime;
-    }
+    AppendToTrack(aSource, mImage, aID, delta);
   }
 }
 
 /*static*/
 bool
 MediaEngineWebRTCVideoSource::SatisfiesConstraintSet(const MediaTrackConstraintSet &aConstraints,
                                                      const webrtc::CaptureCapability& aCandidate) {
   if (!MediaEngineCameraVideoSource::IsWithin(aCandidate.width, aConstraints.mWidth) ||