--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -84,16 +84,18 @@ public:
void Activate(already_AddRefed<SourceMediaStream> aStream,
MediaEngineSource* aAudioSource,
MediaEngineSource* aVideoSource)
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
mStream = aStream;
mAudioSource = aAudioSource;
mVideoSource = aVideoSource;
+ mLastEndTimeAudio = 0;
+ mLastEndTimeVideo = 0;
mStream->AddListener(this);
}
MediaStream *Stream() // Can be used to test if Activate was called
{
return mStream;
}
@@ -180,20 +182,20 @@ public:
// Proxy NotifyPull() to sources
virtual void
NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) MOZ_OVERRIDE
{
// Currently audio sources ignore NotifyPull, but they could
// watch it especially for fake audio.
if (mAudioSource) {
- mAudioSource->NotifyPull(aGraph, mStream, kAudioTrack, aDesiredTime);
+ mAudioSource->NotifyPull(aGraph, mStream, kAudioTrack, aDesiredTime, mLastEndTimeAudio);
}
if (mVideoSource) {
- mVideoSource->NotifyPull(aGraph, mStream, kVideoTrack, aDesiredTime);
+ mVideoSource->NotifyPull(aGraph, mStream, kVideoTrack, aDesiredTime, mLastEndTimeVideo);
}
}
virtual void
NotifyEvent(MediaStreamGraph* aGraph,
MediaStreamListener::MediaStreamGraphEvent aEvent) MOZ_OVERRIDE
{
switch (aEvent) {
@@ -232,16 +234,18 @@ private:
// Set at Activate on MainThread
// Accessed from MediaStreamGraph thread, MediaManager thread, and MainThread
// No locking needed as they're only addrefed except on the MediaManager thread
nsRefPtr<MediaEngineSource> mAudioSource; // threadsafe refcnt
nsRefPtr<MediaEngineSource> mVideoSource; // threadsafe refcnt
nsRefPtr<SourceMediaStream> mStream; // threadsafe refcnt
+ StreamTime mLastEndTimeAudio;
+ StreamTime mLastEndTimeVideo;
bool mFinished;
// Accessed from MainThread and MSG thread
Mutex mLock; // protects mRemoved access from MainThread
bool mRemoved;
};
class GetUserMediaNotificationEvent: public nsRunnable
--- a/dom/media/webrtc/MediaEngine.h
+++ b/dom/media/webrtc/MediaEngine.h
@@ -105,17 +105,18 @@ public:
/* tell the source if there are any direct listeners attached */
virtual void SetDirectListeners(bool) = 0;
/* Called when the stream wants more data */
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
- StreamTime aDesiredTime) = 0;
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime) = 0;
/* Stop the device and release the corresponding MediaStream */
virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;
/* Change device configuration. */
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise,
--- a/dom/media/webrtc/MediaEngineCameraVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineCameraVideoSource.cpp
@@ -56,18 +56,16 @@ bool MediaEngineCameraVideoSource::Appen
{
MOZ_ASSERT(aSource);
VideoSegment segment;
nsRefPtr<layers::Image> image = aImage;
IntSize size(image ? mWidth : 0, image ? mHeight : 0);
segment.AppendFrame(image.forget(), delta, size);
- mProducedDuration += delta;
-
// This is safe from any thread, and is safe if the track is Finished
// or Destroyed.
// This can fail if either a) we haven't added the track yet, or b)
// we've removed or finished the track.
return aSource->AppendToTrack(aID, &(segment));
}
// A special version of the algorithm for cameras that don't list capabilities.
--- a/dom/media/webrtc/MediaEngineCameraVideoSource.h
+++ b/dom/media/webrtc/MediaEngineCameraVideoSource.h
@@ -20,17 +20,16 @@ class MediaEngineCameraVideoSource : pub
{
public:
explicit MediaEngineCameraVideoSource(int aIndex,
const char* aMonitorName = "Camera.Monitor")
: MediaEngineVideoSource(kReleased)
, mMonitor(aMonitorName)
, mWidth(0)
, mHeight(0)
- , mProducedDuration(0)
, mInitDone(false)
, mHasDirectListeners(false)
, mCaptureIndex(aIndex)
, mTrackID(0)
, mFps(-1)
{}
@@ -87,18 +86,16 @@ protected:
Monitor mMonitor; // Monitor for processing Camera frames.
nsRefPtr<layers::Image> mImage;
nsRefPtr<layers::ImageContainer> mImageContainer;
int mWidth, mHeight; // protected with mMonitor on Gonk due to different threading
// end of data protected by mMonitor
nsTArray<SourceMediaStream*> mSources; // When this goes empty, we shut down HW
- StreamTime mProducedDuration;
-
bool mInitDone;
bool mHasDirectListeners;
int mCaptureIndex;
TrackID mTrackID;
int mFps; // Track rate (30 fps by default)
webrtc::CaptureCapability mCapability; // Doesn't work on OS X.
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -42,17 +42,16 @@ NS_IMPL_ISUPPORTS(MediaEngineDefaultVide
* Default video source.
*/
MediaEngineDefaultVideoSource::MediaEngineDefaultVideoSource()
: MediaEngineVideoSource(kReleased)
, mTimer(nullptr)
, mMonitor("Fake video")
, mCb(16), mCr(16)
- , mProducedDuration(0)
{
mImageContainer = layers::LayerManager::CreateImageContainer();
}
MediaEngineDefaultVideoSource::~MediaEngineDefaultVideoSource()
{}
void
@@ -240,37 +239,38 @@ MediaEngineDefaultVideoSource::Notify(ns
return NS_OK;
}
void
MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aID,
- StreamTime aDesiredTime)
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime)
{
// AddTrack takes ownership of segment
VideoSegment segment;
MonitorAutoLock lock(mMonitor);
if (mState != kStarted) {
return;
}
// Note: we're not giving up mImage here
nsRefPtr<layers::Image> image = mImage;
- StreamTime delta = aDesiredTime - mProducedDuration;
+ StreamTime delta = aDesiredTime - aLastEndTime;
if (delta > 0) {
// nullptr images are allowed
IntSize size(image ? mOpts.mWidth : 0, image ? mOpts.mHeight : 0);
segment.AppendFrame(image.forget(), delta, size);
// This can fail if either a) we haven't added the track yet, or b)
// we've removed or finished the track.
if (aSource->AppendToTrack(aID, &segment)) {
- mProducedDuration = aDesiredTime;
+ aLastEndTime = aDesiredTime;
}
// Generate null data for fake tracks.
if (mHasFakeTracks) {
for (int i = 0; i < kFakeVideoTrackCount; ++i) {
VideoSegment nullSegment;
nullSegment.AppendNullData(delta);
aSource->AppendToTrack(kTrackCount + i, &nullSegment);
}
--- a/dom/media/webrtc/MediaEngineDefault.h
+++ b/dom/media/webrtc/MediaEngineDefault.h
@@ -48,17 +48,18 @@ public:
virtual void SetDirectListeners(bool aHasDirectListeners) {};
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay) { return NS_OK; };
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
- StreamTime aDesiredTime) MOZ_OVERRIDE;
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime) MOZ_OVERRIDE;
virtual bool SatisfiesConstraintSets(
const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets)
{
return true;
}
virtual bool IsFake() {
return true;
@@ -90,18 +91,16 @@ protected:
Monitor mMonitor;
nsRefPtr<layers::Image> mImage;
nsRefPtr<layers::ImageContainer> mImageContainer;
MediaEnginePrefs mOpts;
int mCb;
int mCr;
-
- StreamTime mProducedDuration;
};
class SineWaveGenerator;
class MediaEngineDefaultAudioSource : public nsITimerCallback,
public MediaEngineAudioSource
{
public:
@@ -118,17 +117,18 @@ public:
virtual void SetDirectListeners(bool aHasDirectListeners) {};
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay) { return NS_OK; };
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
- StreamTime aDesiredTime) {}
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime) {}
virtual bool IsFake() {
return true;
}
virtual const MediaSourceType GetMediaSource() {
return MediaSourceType::Microphone;
}
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.cpp
@@ -40,28 +40,29 @@ NS_IMPL_RELEASE_INHERITED(MediaEngineGon
// Called if the graph thinks it's running out of buffered video; repeat
// the last frame for whatever minimum period it think it needs. Note that
// this means that no *real* frame can be inserted during this period.
void
MediaEngineGonkVideoSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aID,
- StreamTime aDesiredTime)
+ StreamTime aDesiredTime,
+ StreamTime& aLastEndTime)
{
VideoSegment segment;
MonitorAutoLock lock(mMonitor);
// B2G does AddTrack, but holds kStarted until the hardware changes state.
// So mState could be kReleased here. We really don't care about the state,
// though.
// Note: we're not giving up mImage here
nsRefPtr<layers::Image> image = mImage;
- StreamTime delta = aDesiredTime - mProducedDuration;
+ StreamTime delta = aDesiredTime - aLastEndTime;
LOGFRAME(("NotifyPull, desired = %ld, delta = %ld %s", (int64_t) aDesiredTime,
(int64_t) delta, image ? "" : "<null>"));
// Bug 846188 We may want to limit incoming frames to the requested frame rate
// mFps - if you want 30FPS, and the camera gives you 60FPS, this could
// cause issues.
// We may want to signal if the actual frame rate is below mMinFPS -
// cameras often don't return the requested frame rate especially in low
@@ -72,17 +73,17 @@ MediaEngineGonkVideoSource::NotifyPull(M
// Doing so means a negative delta and thus messes up handling of the graph
if (delta > 0) {
// nullptr images are allowed
IntSize size(image ? mWidth : 0, image ? mHeight : 0);
segment.AppendFrame(image.forget(), delta, size);
// This can fail if either a) we haven't added the track yet, or b)
// we've removed or finished the track.
if (aSource->AppendToTrack(aID, &(segment))) {
- mProducedDuration = aDesiredTime;
+ aLastEndTime = aDesiredTime;
}
}
}
void
MediaEngineGonkVideoSource::ChooseCapability(const VideoTrackConstraintsN& aConstraints,
const MediaEnginePrefs& aPrefs)
{
--- a/dom/media/webrtc/MediaEngineGonkVideoSource.h
+++ b/dom/media/webrtc/MediaEngineGonkVideoSource.h
@@ -43,34 +43,33 @@ class MediaEngineGonkVideoSource : publi
{
public:
NS_DECL_ISUPPORTS_INHERITED
MediaEngineGonkVideoSource(int aIndex)
: MediaEngineCameraVideoSource(aIndex, "GonkCamera.Monitor")
, mCallbackMonitor("GonkCamera.CallbackMonitor")
, mCameraControl(nullptr)
- , mProducedDuration(0)
, mRotation(0)
, mBackCamera(false)
- , mOrientationChanged(true)
- // Correct the orientation at first time takePhoto.
+ , mOrientationChanged(true) // Correct the orientation at first time takePhoto.
{
Init();
}
virtual nsresult Allocate(const VideoTrackConstraintsN &aConstraints,
const MediaEnginePrefs &aPrefs) MOZ_OVERRIDE;
virtual nsresult Deallocate() MOZ_OVERRIDE;
virtual nsresult Start(SourceMediaStream* aStream, TrackID aID) MOZ_OVERRIDE;
virtual nsresult Stop(SourceMediaStream* aSource, TrackID aID) MOZ_OVERRIDE;
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aId,
- StreamTime aDesiredTime) MOZ_OVERRIDE;
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime) MOZ_OVERRIDE;
virtual bool SatisfiesConstraintSets(
const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets)
{
return true;
}
void OnHardwareStateChange(HardwareState aState, nsresult aReason) MOZ_OVERRIDE;
void GetRotation();
@@ -102,17 +101,16 @@ protected:
void Shutdown();
void ChooseCapability(const VideoTrackConstraintsN& aConstraints,
const MediaEnginePrefs& aPrefs);
mozilla::ReentrantMonitor mCallbackMonitor; // Monitor for camera callback handling
// This is only modified on MainThread (AllocImpl and DeallocImpl)
nsRefPtr<ICameraControl> mCameraControl;
nsCOMPtr<nsIDOMFile> mLastCapture;
- StreamTime mProducedDuration;
// These are protected by mMonitor in parent class
nsTArray<nsRefPtr<PhotoCallback>> mPhotoCallbacks;
int mRotation;
int mCameraAngle; // See dom/base/ScreenOrientation.h
bool mBackCamera;
bool mOrientationChanged; // True when screen rotates.
--- a/dom/media/webrtc/MediaEngineTabVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.cpp
@@ -28,18 +28,17 @@
namespace mozilla {
using namespace mozilla::gfx;
using dom::ConstrainLongRange;
NS_IMPL_ISUPPORTS(MediaEngineTabVideoSource, nsIDOMEventListener, nsITimerCallback)
MediaEngineTabVideoSource::MediaEngineTabVideoSource()
- : mProducedDuration(0)
- , mMonitor("MediaEngineTabVideoSource")
+: mMonitor("MediaEngineTabVideoSource"), mTabSource(nullptr)
{
}
nsresult
MediaEngineTabVideoSource::StartRunnable::Run()
{
mVideoSource->Draw();
mVideoSource->mTimer = do_CreateInstance(NS_TIMER_CONTRACTID);
@@ -188,32 +187,33 @@ MediaEngineTabVideoSource::Start(SourceM
aStream->AdvanceKnownTracksTime(STREAM_TIME_MAX);
return NS_OK;
}
void
MediaEngineTabVideoSource::NotifyPull(MediaStreamGraph*,
SourceMediaStream* aSource,
- TrackID aID, StreamTime aDesiredTime)
+ TrackID aID, StreamTime aDesiredTime,
+ StreamTime& aLastEndTime)
{
VideoSegment segment;
MonitorAutoLock mon(mMonitor);
// Note: we're not giving up mImage here
nsRefPtr<layers::CairoImage> image = mImage;
- StreamTime delta = aDesiredTime - mProducedDuration;
+ StreamTime delta = aDesiredTime - aLastEndTime;
if (delta > 0) {
// nullptr images are allowed
gfx::IntSize size = image ? image->GetSize() : IntSize(0, 0);
segment.AppendFrame(image.forget().downcast<layers::Image>(), delta, size);
// This can fail if either a) we haven't added the track yet, or b)
// we've removed or finished the track.
if (aSource->AppendToTrack(aID, &(segment))) {
- mProducedDuration = aDesiredTime;
+ aLastEndTime = aDesiredTime;
}
}
}
void
MediaEngineTabVideoSource::Draw() {
IntSize size(mBufW, mBufH);
--- a/dom/media/webrtc/MediaEngineTabVideoSource.h
+++ b/dom/media/webrtc/MediaEngineTabVideoSource.h
@@ -20,17 +20,17 @@ class MediaEngineTabVideoSource : public
virtual void GetName(nsAString_internal&);
virtual void GetUUID(nsAString_internal&);
virtual nsresult Allocate(const VideoTrackConstraintsN &,
const mozilla::MediaEnginePrefs&);
virtual nsresult Deallocate();
virtual nsresult Start(mozilla::SourceMediaStream*, mozilla::TrackID);
virtual void SetDirectListeners(bool aHasDirectListeners) {};
- virtual void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime) MOZ_OVERRIDE;
+ virtual void NotifyPull(mozilla::MediaStreamGraph*, mozilla::SourceMediaStream*, mozilla::TrackID, mozilla::StreamTime, mozilla::StreamTime&) MOZ_OVERRIDE;
virtual nsresult Stop(mozilla::SourceMediaStream*, mozilla::TrackID);
virtual nsresult Config(bool, uint32_t, bool, uint32_t, bool, uint32_t, int32_t);
virtual bool IsFake();
virtual const MediaSourceType GetMediaSource() {
return MediaSourceType::Browser;
}
virtual bool SatisfiesConstraintSets(
const nsTArray<const dom::MediaTrackConstraintSet*>& aConstraintSets)
@@ -74,13 +74,12 @@ private:
int mBufH;
int64_t mWindowId;
bool mScrollWithPage;
int mTimePerFrame;
ScopedFreePtr<unsigned char> mData;
nsCOMPtr<nsIDOMWindow> mWindow;
nsRefPtr<layers::CairoImage> mImage;
nsCOMPtr<nsITimer> mTimer;
- StreamTime mProducedDuration;
Monitor mMonitor;
nsCOMPtr<nsITabSource> mTabSource;
};
}
--- a/dom/media/webrtc/MediaEngineWebRTC.h
+++ b/dom/media/webrtc/MediaEngineWebRTC.h
@@ -92,17 +92,18 @@ public:
virtual nsresult Allocate(const VideoTrackConstraintsN& aConstraints,
const MediaEnginePrefs& aPrefs);
virtual nsresult Deallocate();
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aId,
- StreamTime aDesiredTime) MOZ_OVERRIDE;
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime) MOZ_OVERRIDE;
virtual const MediaSourceType GetMediaSource() {
return mMediaSource;
}
virtual nsresult TakePhoto(PhotoCallback* aCallback)
{
return NS_ERROR_NOT_IMPLEMENTED;
}
@@ -174,17 +175,18 @@ public:
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise,
int32_t aPlayoutDelay);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aId,
- StreamTime aDesiredTime) MOZ_OVERRIDE;
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime) MOZ_OVERRIDE;
virtual bool IsFake() {
return false;
}
virtual const MediaSourceType GetMediaSource() {
return MediaSourceType::Microphone;
}
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -393,21 +393,25 @@ MediaEngineWebRTCAudioSource::Stop(Sourc
}
return NS_OK;
}
void
MediaEngineWebRTCAudioSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aID,
- StreamTime aDesiredTime)
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime)
{
// Ignore - we push audio data
#ifdef DEBUG
- LOG(("Audio: NotifyPull: aDesiredTime %ld", aDesiredTime));
+ StreamTime delta = aDesiredTime - aLastEndTime;
+ LOG(("Audio: NotifyPull: aDesiredTime %ld, delta %ld",(int64_t) aDesiredTime,
+ (int64_t) delta));
#endif
+ aLastEndTime = aDesiredTime;
}
void
MediaEngineWebRTCAudioSource::Init()
{
mVoEBase = webrtc::VoEBase::GetInterface(mVoiceEngine);
--- a/dom/media/webrtc/MediaEngineWebRTCVideo.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCVideo.cpp
@@ -119,42 +119,45 @@ MediaEngineWebRTCVideoSource::DeliverFra
// Called if the graph thinks it's running out of buffered video; repeat
// the last frame for whatever minimum period it think it needs. Note that
// this means that no *real* frame can be inserted during this period.
void
MediaEngineWebRTCVideoSource::NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream* aSource,
TrackID aID,
- StreamTime aDesiredTime)
+ StreamTime aDesiredTime,
+ StreamTime &aLastEndTime)
{
VideoSegment segment;
MonitorAutoLock lock(mMonitor);
// B2G does AddTrack, but holds kStarted until the hardware changes state.
// So mState could be kReleased here. We really don't care about the state,
// though.
- StreamTime delta = aDesiredTime - mProducedDuration;
+ StreamTime delta = aDesiredTime - aLastEndTime;
LOGFRAME(("NotifyPull, desired = %ld, delta = %ld %s", (int64_t) aDesiredTime,
(int64_t) delta, mImage.get() ? "" : "<null>"));
// Bug 846188 We may want to limit incoming frames to the requested frame rate
// mFps - if you want 30FPS, and the camera gives you 60FPS, this could
// cause issues.
// We may want to signal if the actual frame rate is below mMinFPS -
// cameras often don't return the requested frame rate especially in low
// light; we should consider surfacing this so that we can switch to a
// lower resolution (which may up the frame rate)
// Don't append if we've already provided a frame that supposedly goes past the current aDesiredTime
// Doing so means a negative delta and thus messes up handling of the graph
if (delta > 0) {
// nullptr images are allowed
- AppendToTrack(aSource, mImage, aID, delta);
+ if (AppendToTrack(aSource, mImage, aID, delta)) {
+ aLastEndTime = aDesiredTime;
+ }
}
}
/*static*/
bool
MediaEngineWebRTCVideoSource::SatisfiesConstraintSet(const MediaTrackConstraintSet &aConstraints,
const webrtc::CaptureCapability& aCandidate) {
if (!MediaEngineCameraVideoSource::IsWithin(aCandidate.width, aConstraints.mWidth) ||