author | Andreas Pehrson <apehrson@mozilla.com>
date | Thu, 17 Oct 2019 18:31:22 +0000
changeset 498104 | 721bbb99b98dbeacde74e75afb55314a3b78e539 |
parent 498103 | 131a93988dcd996ac62a18ebe88719d7e30b4e3e |
child 498105 | e9dada6878890675b3bcb6decbf65a1d7e2b3fae |
push id | 98283 |
push user | pehrsons@gmail.com |
push date | Fri, 18 Oct 2019 09:02:05 +0000 |
treeherder | autoland@721bbb99b98d
perfherder | [talos] [build metrics] [platform microbench] (compared to previous push) |
reviewers | bryce |
bugs | 1587248 |
milestone | 71.0a1 |
first release with | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
last release without | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -2288,25 +2288,28 @@ void HTMLMediaElement::NotifyMediaTrackE
       return;
     }
     mDisableVideo = false;
   } else {
     MOZ_ASSERT(false, "Unknown track type");
   }
 
   if (mSrcStream) {
-    MOZ_ASSERT(mMediaStreamRenderer);
     if (AudioTrack* t = aTrack->AsAudioTrack()) {
-      mMediaStreamRenderer->AddTrack(t->GetAudioStreamTrack());
+      if (mMediaStreamRenderer) {
+        mMediaStreamRenderer->AddTrack(t->GetAudioStreamTrack());
+      }
     } else if (VideoTrack* t = aTrack->AsVideoTrack()) {
       MOZ_ASSERT(!mSelectedVideoStreamTrack);
 
       mSelectedVideoStreamTrack = t->GetVideoStreamTrack();
       mSelectedVideoStreamTrack->AddPrincipalChangeObserver(this);
-      mMediaStreamRenderer->AddTrack(mSelectedVideoStreamTrack);
+      if (mMediaStreamRenderer) {
+        mMediaStreamRenderer->AddTrack(mSelectedVideoStreamTrack);
+      }
       nsContentUtils::CombineResourcePrincipals(
           &mSrcStreamVideoPrincipal, mSelectedVideoStreamTrack->GetPrincipal());
       if (VideoFrameContainer* container = GetVideoFrameContainer()) {
         HTMLVideoElement* self = static_cast<HTMLVideoElement*>(this);
         if (mSrcStreamIsPlaying) {
           MaybeBeginCloningVisually();
         } else if (self->VideoWidth() <= 1 && self->VideoHeight() <= 1) {
           // MediaInfo uses dummy values of 1 for width and height to
@@ -2348,18 +2351,20 @@ void HTMLMediaElement::NotifyMediaTrackD
                         aTrack->AsAudioTrack() ? "Audio" : "Video",
                         NS_ConvertUTF16toUTF8(id).get()));
 #endif
 
   MOZ_ASSERT((!aTrack->AsAudioTrack() || !aTrack->AsAudioTrack()->Enabled()) &&
              (!aTrack->AsVideoTrack() || !aTrack->AsVideoTrack()->Selected()));
 
   if (AudioTrack* t = aTrack->AsAudioTrack()) {
-    if (mMediaStreamRenderer) {
-      mMediaStreamRenderer->RemoveTrack(t->GetAudioStreamTrack());
+    if (mSrcStream) {
+      if (mMediaStreamRenderer) {
+        mMediaStreamRenderer->RemoveTrack(t->GetAudioStreamTrack());
+      }
     }
     // If we don't have any live tracks, we don't need to mute MediaElement.
     MOZ_DIAGNOSTIC_ASSERT(AudioTracks(), "Element can't have been unlinked");
     if (AudioTracks()->Length() > 0) {
       bool shouldMute = true;
       for (uint32_t i = 0; i < AudioTracks()->Length(); ++i) {
         if ((*AudioTracks())[i]->Enabled()) {
           shouldMute = false;
@@ -2367,24 +2372,26 @@ void HTMLMediaElement::NotifyMediaTrackD
         }
       }
 
       if (shouldMute) {
         SetMutedInternal(mMuted | MUTED_BY_AUDIO_TRACK);
       }
     }
   } else if (aTrack->AsVideoTrack()) {
-    if (mMediaStreamRenderer) {
+    if (mSrcStream) {
       MOZ_DIAGNOSTIC_ASSERT(mSelectedVideoStreamTrack ==
                             aTrack->AsVideoTrack()->GetVideoStreamTrack());
       if (mFirstFrameListener) {
         mSelectedVideoStreamTrack->RemoveVideoOutput(mFirstFrameListener);
         mFirstFrameListener = nullptr;
       }
-      mMediaStreamRenderer->RemoveTrack(mSelectedVideoStreamTrack);
+      if (mMediaStreamRenderer) {
+        mMediaStreamRenderer->RemoveTrack(mSelectedVideoStreamTrack);
+      }
       mSelectedVideoStreamTrack->RemovePrincipalChangeObserver(this);
       mSelectedVideoStreamTrack = nullptr;
     }
   }
 
   if (mReadyState == HAVE_NOTHING) {
     // No MediaStreamTracks are captured until we have metadata, and code
     // below doesn't do anything for captured decoders.
@@ -4809,33 +4816,38 @@ void HTMLMediaElement::UpdateSrcMediaStr
   LOG(LogLevel::Debug, ("MediaElement %p %s playback of DOMMediaStream %p",
                         this, shouldPlay ? "Setting up" : "Removing",
                         mSrcStream.get()));
 
   if (shouldPlay) {
     mSrcStreamPlaybackEnded = false;
-    mMediaStreamRenderer->Start();
+    if (mMediaStreamRenderer) {
+      mMediaStreamRenderer->Start();
+    }
+
     if (mSink.second()) {
       NS_WARNING(
           "setSinkId() when playing a MediaStream is not supported yet and "
           "will be ignored");
     }
 
     if (mSelectedVideoStreamTrack && GetVideoFrameContainer()) {
       MaybeBeginCloningVisually();
     }
 
     SetCapturedOutputStreamsEnabled(true);  // Unmute
     // If the input is a media stream, we don't check its data and always regard
     // it as audible when it's playing.
     SetAudibleState(true);
   } else {
-    mMediaStreamRenderer->Stop();
+    if (mMediaStreamRenderer) {
+      mMediaStreamRenderer->Stop();
+    }
     VideoFrameContainer* container = GetVideoFrameContainer();
     if (mSelectedVideoStreamTrack && container) {
       HTMLVideoElement* self = static_cast<HTMLVideoElement*>(this);
       if (self->VideoWidth() <= 1 && self->VideoHeight() <= 1) {
         // MediaInfo uses dummy values of 1 for width and height to
         // mark video as valid. We need a new first-frame listener
         // if size is 0x0 or 1x1.
         if (!mFirstFrameListener) {