backed out by | e85551acd081
author | Paul Adenot <paul@paul.cx>
date | Mon, 05 Mar 2018 13:31:00 +0100
changeset 459717 | 41bb4a676575ed56154b9f665199131067245d99
parent 459716 | 214a9d4099a6907aeeab59417162ef008ad0de0f
child 459718 | abca0eb36d337636bfbe53b37df55c93c61c7023
push id | 8824
push user | archaeopteryx@coole-files.de
push date | Mon, 12 Mar 2018 14:54:48 +0000
treeherder | mozilla-beta@8d9daab95d68
reviewers | jya, pehrsons
bugs | 1437366
milestone | 60.0a1
first release with | nightly linux32, linux64, mac, win32, win64
last release without | nightly linux32, linux64, mac, win32, win64
files | media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1960,43 +1960,47 @@ public:
     , mTrackId(aTrack->GetInputTrackId())
     , mSource(mTrack->GetInputStream()->AsSourceStream())
     , mPlayedTicks(0)
     , mPrincipalHandle(PRINCIPAL_HANDLE_NONE)
     , mListening(false)
     , mMaybeTrackNeedsUnmute(true)
   {
     MOZ_RELEASE_ASSERT(mSource, "Must be used with a SourceMediaStream");
+  }
+
+  virtual ~GenericReceiveListener()
+  {
+    NS_ReleaseOnMainThreadSystemGroup(
+      "GenericReceiveListener::track_", mTrack.forget());
+  }
+
+  void AddTrackToSource(uint32_t aRate = 0)
+  {
+    MOZ_ASSERT((aRate != 0 && mTrack->AsAudioStreamTrack()) ||
+               mTrack->AsVideoStreamTrack());
 
     if (mTrack->AsAudioStreamTrack()) {
       mSource->AddAudioTrack(
-        mTrackId, mSource->GraphRate(), 0, new AudioSegment());
+        mTrackId, aRate, 0, new AudioSegment());
     } else if (mTrack->AsVideoStreamTrack()) {
       mSource->AddTrack(mTrackId, 0, new VideoSegment());
-    } else {
-      MOZ_ASSERT_UNREACHABLE("Unknown track type");
     }
     CSFLogDebug(
       LOGTAG,
       "GenericReceiveListener added %s track %d (%p) to stream %p",
       mTrack->AsAudioStreamTrack() ? "audio" : "video",
       mTrackId,
       mTrack.get(),
       mSource.get());
 
     mSource->AdvanceKnownTracksTime(STREAM_TIME_MAX);
     mSource->AddListener(this);
   }
 
-  virtual ~GenericReceiveListener()
-  {
-    NS_ReleaseOnMainThreadSystemGroup(
-      "GenericReceiveListener::track_", mTrack.forget());
-  }
-
   void AddSelf()
   {
     if (!mListening) {
       mListening = true;
       mSource->SetPullEnabled(true);
       mMaybeTrackNeedsUnmute = true;
     }
   }
@@ -2110,16 +2114,17 @@ public:
             ->IsSamplingFreqSupported(mSource->GraphRate())
           ? mSource->GraphRate()
           : WEBRTC_MAX_SAMPLE_RATE)
     , mTaskQueue(
         new AutoTaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
                           "AudioPipelineListener"))
     , mLastLog(0)
   {
+    AddTrackToSource(mRate);
   }
 
   // Implement MediaStreamListener
   void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override
   {
     NotifyPullImpl(aDesiredTime);
   }
 
@@ -2141,20 +2146,23 @@ private:
   {
     NS_ReleaseOnMainThreadSystemGroup("MediaPipeline::mConduit",
                                       mConduit.forget());
   }
 
   void NotifyPullImpl(StreamTime aDesiredTime)
   {
     uint32_t samplesPer10ms = mRate / 100;
-    // Determine how many frames we need.
-    // As we get frames from conduit_ at the same rate as the graph's rate,
-    // the number of frames needed straightfully determined.
-    TrackTicks framesNeeded = aDesiredTime - mPlayedTicks;
+
+    // mSource's rate is not necessarily the same as the graph rate, since there
+    // are sample-rate constraints on the inbound audio: only 16, 32, 44.1 and
+    // 48kHz are supported. The audio frames we get here is going to be
+    // resampled when inserted into the graph.
+    TrackTicks desired = mSource->TimeToTicksRoundUp(mRate, aDesiredTime);
+    TrackTicks framesNeeded = desired - mPlayedTicks;
 
     while (framesNeeded >= 0) {
       const int scratchBufferLength =
         AUDIO_SAMPLE_BUFFER_MAX_BYTES / sizeof(int16_t);
       int16_t scratchBuffer[scratchBufferLength];
 
       int samplesLength = scratchBufferLength;
 
@@ -2308,16 +2316,17 @@ class MediaPipelineReceiveVideo::Pipelin
 {
 public:
   explicit PipelineListener(dom::MediaStreamTrack* aTrack)
     : GenericReceiveListener(aTrack)
     , mImageContainer(
         LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS))
     , mMutex("Video PipelineListener")
   {
+    AddTrackToSource();
   }
 
   // Implement MediaStreamListener
   void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override
   {
     MutexAutoLock lock(mMutex);
     RefPtr<Image> image = mImage;
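
The comment added in NotifyPullImpl explains the core of the fix: the receive track's rate (one of 16, 32, 44.1 or 48 kHz) is not necessarily the graph rate, so the graph's desired time must be converted to ticks at the track rate before computing how many frames to pull. The following standalone sketch is not part of the patch; it only illustrates that conversion under the assumption that StreamTime is counted in graph-rate ticks, and it mirrors the intent of mSource->TimeToTicksRoundUp() with an illustrative helper of the same name. All names and values are examples, not Gecko APIs.

#include <cstdint>
#include <iostream>

using StreamTime = int64_t; // time in graph-rate ticks (assumed)
using TrackTicks = int64_t; // frames at the track's sample rate

// Round-up conversion from graph ticks to ticks at the track rate
// (illustrative stand-in for SourceMediaStream::TimeToTicksRoundUp).
static TrackTicks TimeToTicksRoundUp(uint32_t aTrackRate,
                                     uint32_t aGraphRate,
                                     StreamTime aTime)
{
  return (aTime * aTrackRate + aGraphRate - 1) / aGraphRate;
}

int main()
{
  const uint32_t graphRate = 44100;    // graph running at 44.1 kHz
  const uint32_t trackRate = 48000;    // inbound audio constrained to 48 kHz
  const StreamTime desiredTime = 4410; // graph wants audio up to 100 ms
  const TrackTicks playedTicks = 3840; // 80 ms already delivered at 48 kHz

  TrackTicks desired = TimeToTicksRoundUp(trackRate, graphRate, desiredTime);
  TrackTicks framesNeeded = desired - playedTicks;
  uint32_t samplesPer10ms = trackRate / 100;

  // ceil(4410 * 48000 / 44100) - 3840 = 4800 - 3840 = 960 frames,
  // i.e. two 10 ms chunks of 480 frames at 48 kHz.
  std::cout << framesNeeded << " frames needed, "
            << framesNeeded / samplesPer10ms << " chunks of "
            << samplesPer10ms << " frames\n";
  return 0;
}

With the pre-patch computation (aDesiredTime - mPlayedTicks, taken directly in graph ticks), the same scenario would under- or over-estimate the frame count whenever the graph rate and the track rate differ, which is what the conversion above avoids.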