Bug 639721 - Fix data race on nsBuiltinDecoderReader::mInfo - r=kinetik
author Chris Double <chris.double@double.co.nz>
Thu, 24 Mar 2011 16:53:03 +1300
changeset 63857 5915aecd1db7552dd56ae64542d6721056694bc5
parent 63856 6aac39e19f6066db5a09cdb39371a48924f87119
child 63858 9a1aa415c9a1ba234fb3330c0de5787fceb7dd67
push id unknown
push user unknown
push date unknown
reviewers kinetik
bugs 639721
milestone 2.2a1pre
Bug 639721 - Fix data race on nsBuiltinDecoderReader::mInfo - r=kinetik
content/media/nsBuiltinDecoderReader.h
content/media/nsBuiltinDecoderStateMachine.cpp
content/media/nsBuiltinDecoderStateMachine.h
content/media/ogg/nsOggReader.cpp
content/media/ogg/nsOggReader.h
content/media/raw/nsRawReader.cpp
content/media/raw/nsRawReader.h
content/media/webm/nsWebMReader.cpp
content/media/webm/nsWebMReader.h
--- a/content/media/nsBuiltinDecoderReader.h
+++ b/content/media/nsBuiltinDecoderReader.h
@@ -409,16 +409,17 @@ private:
 // on either the state machine thread (when loading and seeking) or on
 // the reader thread (when it's reading and decoding). The reader encapsulates
 // the reading state and maintains its own monitor to ensure thread safety
 // and correctness. Never hold the nsBuiltinDecoder's monitor when calling into
 // this class.
 class nsBuiltinDecoderReader : public nsRunnable {
 public:
   typedef mozilla::Monitor Monitor;
+  typedef mozilla::MonitorAutoEnter MonitorAutoEnter;
 
   nsBuiltinDecoderReader(nsBuiltinDecoder* aDecoder);
   ~nsBuiltinDecoderReader();
 
   // Initializes the reader, returns NS_OK on success, or NS_ERROR_FAILURE
   // on failure.
   virtual nsresult Init(nsBuiltinDecoderReader* aCloneDonor) = 0;
 
@@ -438,17 +439,17 @@ public:
                                   PRInt64 aTimeThreshold) = 0;
 
   virtual PRBool HasAudio() = 0;
   virtual PRBool HasVideo() = 0;
 
   // Read header data for all bitstreams in the file. Fills mInfo with
   // the data required to present the media. Returns NS_OK on success,
   // or NS_ERROR_FAILURE on failure.
-  virtual nsresult ReadMetadata() = 0;
+  virtual nsresult ReadMetadata(nsVideoInfo* aInfo) = 0;
 
   // Stores the presentation time of the first frame/sample we'd be
   // able to play if we started playback at aOffset, and returns the
   // first video sample, if we have video.
   virtual VideoData* FindStartTime(PRInt64 aOffset,
                                    PRInt64& aOutStartTime);
 
   // Returns the end time of the last page which occurs before aEndOffset.
@@ -458,21 +459,16 @@ public:
   // Moves the decode head to aTime milliseconds. aStartTime and aEndTime
   // denote the start and end times of the media in ms, and aCurrentTime
   // is the current playback position in ms.
   virtual nsresult Seek(PRInt64 aTime,
                         PRInt64 aStartTime,
                         PRInt64 aEndTime,
                         PRInt64 aCurrentTime) = 0;
 
-  // Gets presentation info required for playback.
-  const nsVideoInfo& GetInfo() {
-    return mInfo;
-  }
-
   // Queue of audio samples. This queue is threadsafe.
   MediaQueue<SoundData> mAudioQueue;
 
   // Queue of video samples. This queue is threadsafe.
   MediaQueue<VideoData> mVideoQueue;
 
   // Populates aBuffered with the time ranges which are buffered. aStartTime
   // must be the presentation time of the first sample/frame in the media, e.g.
@@ -515,13 +511,14 @@ protected:
   // Reference to the owning decoder object. Do not hold the
   // reader's monitor when accessing this.
   nsBuiltinDecoder* mDecoder;
 
   // The offset of the start of the first non-header page in the file.
   // Used to seek to media start time.
   PRInt64 mDataOffset;
 
-  // Stores presentation info required for playback.
+  // Stores presentation info required for playback. The reader's monitor
+  // must be held when accessing this.
   nsVideoInfo mInfo;
 };
 
 #endif
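
With GetInfo() gone, the reader's mInfo is only ever accessed with the reader's own monitor held, as the new comment above requires, and callers receive their own copy through the new ReadMetadata() out-parameter. A minimal sketch of the contract an implementation now follows (the class below is a simplified stand-in, not one of the real readers):

    // Sketch only: simplified stand-in for an nsBuiltinDecoderReader subclass.
    class SketchReader {
    public:
      typedef mozilla::Monitor Monitor;
      typedef mozilla::MonitorAutoEnter MonitorAutoEnter;

      SketchReader() : mMonitor("SketchReader") {}

      // Fills *aInfo with a copy of the presentation info so that callers
      // never have to reach into mInfo themselves.
      nsresult ReadMetadata(nsVideoInfo* aInfo) {
        MonitorAutoEnter mon(mMonitor);  // mInfo is guarded by mMonitor
        // ... parse the container headers and populate mInfo here ...
        *aInfo = mInfo;                  // hand back a copy by value
        return NS_OK;
      }

    private:
      Monitor mMonitor;    // the reader's own monitor
      nsVideoInfo mInfo;   // only touched with mMonitor held
    };
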
--- a/content/media/nsBuiltinDecoderStateMachine.cpp
+++ b/content/media/nsBuiltinDecoderStateMachine.cpp
@@ -412,18 +412,18 @@ void nsBuiltinDecoderStateMachine::Audio
   double volume = -1;
   PRBool setVolume;
   PRInt32 minWriteSamples = -1;
   PRInt64 samplesAtLastSleep = 0;
   {
     MonitorAutoEnter mon(mDecoder->GetMonitor());
     mAudioCompleted = PR_FALSE;
     audioStartTime = mAudioStartTime;
-    channels = mReader->GetInfo().mAudioChannels;
-    rate = mReader->GetInfo().mAudioRate;
+    channels = mInfo.mAudioChannels;
+    rate = mInfo.mAudioRate;
     NS_ASSERTION(audioStartTime != -1, "Should have audio start time by now");
   }
   while (1) {
 
     // Wait while we're not playing, and we're not shutting down, or we're
     // playing and we've got no audio to play.
     {
       MonitorAutoEnter mon(mDecoder->GetMonitor());
@@ -707,30 +707,32 @@ void nsBuiltinDecoderStateMachine::Start
 {
   NS_ASSERTION(IsCurrentThread(mDecoder->mStateMachineThread),
                "Should be on state machine thread.");
   NS_ASSERTION(!IsPlaying(), "Shouldn't be playing when StartPlayback() is called");
   mDecoder->GetMonitor().AssertCurrentThreadIn();
   LOG(PR_LOG_DEBUG, ("%p StartPlayback", mDecoder));
   mDecoder->mPlaybackStatistics.Start(TimeStamp::Now());
   if (HasAudio()) {
-    MonitorAutoExit exitMon(mDecoder->GetMonitor());
-    MonitorAutoEnter audioMon(mAudioMonitor);
-    if (mAudioStream) {
-      // We have an audiostream, so it must have been paused the last time
-      // StopPlayback() was called.
-      mAudioStream->Resume();
-    } else {
-      // No audiostream, create one.
-      const nsVideoInfo& info = mReader->GetInfo();
-      mAudioStream = nsAudioStream::AllocateStream();
-      mAudioStream->Init(info.mAudioChannels,
-                         info.mAudioRate,
-                         MOZ_SOUND_DATA_FORMAT);
-      mAudioStream->SetVolume(mVolume);
+    PRInt32 rate = mInfo.mAudioRate;
+    PRInt32 channels = mInfo.mAudioChannels;
+
+    {
+      MonitorAutoExit exitMon(mDecoder->GetMonitor());
+      MonitorAutoEnter audioMon(mAudioMonitor);
+      if (mAudioStream) {
+        // We have an audiostream, so it must have been paused the last time
+        // StopPlayback() was called.
+        mAudioStream->Resume();
+      } else {
+        // No audiostream, create one.
+        mAudioStream = nsAudioStream::AllocateStream();
+        mAudioStream->Init(channels, rate, MOZ_SOUND_DATA_FORMAT);
+        mAudioStream->SetVolume(mVolume);
+      }
     }
   }
   mPlayStartTime = TimeStamp::Now();
   mDecoder->GetMonitor().NotifyAll();
 }
 
 void nsBuiltinDecoderStateMachine::UpdatePlaybackPositionInternal(PRInt64 aTime)
 {
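
The StartPlayback() hunk above uses a pattern that recurs throughout this patch: copy the mInfo fields you need into locals while the decoder monitor is still held, then drop it before taking the audio monitor, so mInfo is never read unlocked and the two monitors are never held at the same time. A condensed sketch of the idea (the function and its arguments are placeholders, not the real state machine):

    void SketchStartAudio(mozilla::Monitor& aDecoderMonitor,
                          mozilla::Monitor& aAudioMonitor,
                          const nsVideoInfo& aInfo)  // guarded by aDecoderMonitor
    {
      aDecoderMonitor.AssertCurrentThreadIn();

      // 1. Snapshot the shared state while its monitor is held.
      PRInt32 rate = aInfo.mAudioRate;
      PRInt32 channels = aInfo.mAudioChannels;

      {
        // 2. Release the decoder monitor for this scope, then take the audio
        //    monitor; from here on only the local copies are used.
        mozilla::MonitorAutoExit exitMon(aDecoderMonitor);
        mozilla::MonitorAutoEnter audioMon(aAudioMonitor);
        // ... create or resume the audio stream using rate and channels ...
      }
      // The decoder monitor is re-acquired when exitMon goes out of scope.
    }
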
@@ -1052,18 +1054,22 @@ nsresult nsBuiltinDecoderStateMachine::R
       {
         LoadMetadata();
         if (mState == DECODER_STATE_SHUTDOWN) {
           continue;
         }
 
         VideoData* videoData = FindStartTime();
         if (videoData) {
-          MonitorAutoExit exitMon(mDecoder->GetMonitor());
-          RenderVideoFrame(videoData, TimeStamp::Now());
+          nsIntSize display = mInfo.mDisplay;
+          float aspect = mInfo.mPixelAspectRatio;
+          {
+            MonitorAutoExit exitMon(mDecoder->GetMonitor());
+            RenderVideoFrame(videoData, TimeStamp::Now(), display, aspect);
+          }
         }
 
         // Start the decode threads, so that we can pre-buffer the streams
         // and calculate the start time in order to determine the duration.
         if (NS_FAILED(StartDecodeThreads())) {
           continue;
         }
 
@@ -1077,24 +1083,23 @@ nsresult nsBuiltinDecoderStateMachine::R
 
         if (mState == DECODER_STATE_SHUTDOWN)
           continue;
 
         // Inform the element that we've loaded the metadata and the first frame,
         // setting the default framebuffer size for audioavailable events.  Also,
         // if there is audio, let the MozAudioAvailable event manager know about
         // the metadata.
-        const nsVideoInfo& info = mReader->GetInfo();
-        PRUint32 frameBufferLength = info.mAudioChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
+        PRUint32 frameBufferLength = mInfo.mAudioChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
         nsCOMPtr<nsIRunnable> metadataLoadedEvent =
-          new nsAudioMetadataEventRunner(mDecoder, info.mAudioChannels,
-                                         info.mAudioRate, frameBufferLength);
+          new nsAudioMetadataEventRunner(mDecoder, mInfo.mAudioChannels,
+                                         mInfo.mAudioRate, frameBufferLength);
         NS_DispatchToMainThread(metadataLoadedEvent, NS_DISPATCH_NORMAL);
         if (HasAudio()) {
-          mEventManager.Init(info.mAudioChannels, info.mAudioRate);
+          mEventManager.Init(mInfo.mAudioChannels, mInfo.mAudioRate);
           mDecoder->RequestFrameBufferLength(frameBufferLength);
         }
 
         if (mState == DECODER_STATE_DECODING_METADATA) {
           LOG(PR_LOG_DEBUG, ("%p Changed state from DECODING_METADATA to DECODING", mDecoder));
           StartDecoding();
         }
 
@@ -1175,17 +1180,22 @@ nsresult nsBuiltinDecoderStateMachine::R
             PRInt64 startTime = (audio && audio->mTime < seekTime) ? audio->mTime : seekTime;
             mAudioStartTime = startTime;
             mPlayDuration = MsToDuration(startTime - mStartTime);
             if (HasVideo()) {
               nsAutoPtr<VideoData> video(mReader->mVideoQueue.PeekFront());
               if (video) {
                 NS_ASSERTION(video->mTime <= seekTime && seekTime <= video->mEndTime,
                              "Seek target should lie inside the first frame after seek");
-                RenderVideoFrame(video, TimeStamp::Now());
+                nsIntSize display = mInfo.mDisplay;
+                float aspect = mInfo.mPixelAspectRatio;
+                {
+                  MonitorAutoExit exitMon(mDecoder->GetMonitor());
+                  RenderVideoFrame(video, TimeStamp::Now(), display, aspect);
+                }
                 mReader->mVideoQueue.PopFront();
                 nsCOMPtr<nsIRunnable> event =
                   NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::Invalidate);
                 NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
               }
             }
           }
         }
@@ -1331,31 +1341,31 @@ nsresult nsBuiltinDecoderStateMachine::R
       break;
     }
   }
 
   return NS_OK;
 }
 
 void nsBuiltinDecoderStateMachine::RenderVideoFrame(VideoData* aData,
-                                                    TimeStamp aTarget)
+                                                    TimeStamp aTarget,
+                                                    nsIntSize aDisplaySize,
+                                                    float aAspectRatio)
 {
   NS_ASSERTION(IsCurrentThread(mDecoder->mStateMachineThread), "Should be on state machine thread.");
+  mDecoder->GetMonitor().AssertNotCurrentThreadIn();
 
   if (aData->mDuplicate) {
     return;
   }
 
   nsRefPtr<Image> image = aData->mImage;
   if (image) {
-    const nsVideoInfo& info = mReader->GetInfo();
-    mDecoder->SetVideoData(gfxIntSize(info.mDisplay.width, info.mDisplay.height),
-                           info.mPixelAspectRatio,
-                           image,
-                           aTarget);
+    mDecoder->SetVideoData(gfxIntSize(aDisplaySize.width, aDisplaySize.height),
+                           aAspectRatio, image, aTarget);
   }
 }
 
 PRInt64
 nsBuiltinDecoderStateMachine::GetAudioClock()
 {
   NS_ASSERTION(IsCurrentThread(mDecoder->mStateMachineThread), "Should be on state machine thread.");
   if (!mAudioStream || !HasAudio())
@@ -1455,20 +1465,24 @@ void nsBuiltinDecoderStateMachine::Advan
     }
 
     if (currentFrame) {
       // Decode one frame and display it.
       TimeStamp presTime = mPlayStartTime - mPlayDuration +
                            MsToDuration(currentFrame->mTime - mStartTime);
       NS_ASSERTION(currentFrame->mTime >= mStartTime, "Should have positive frame time");
       {
-        MonitorAutoExit exitMon(mDecoder->GetMonitor());
-        // If we have video, we want to increment the clock in steps of the frame
-        // duration.
-        RenderVideoFrame(currentFrame, presTime);
+        nsIntSize display = mInfo.mDisplay;
+        float aspect = mInfo.mPixelAspectRatio;
+        {
+          MonitorAutoExit exitMon(mDecoder->GetMonitor());
+          // If we have video, we want to increment the clock in steps of the frame
+          // duration.
+          RenderVideoFrame(currentFrame, presTime, display, aspect);
+        }
       }
       mDecoder->GetFrameStatistics().NotifyPresentedFrame();
       PRInt64 now = DurationToMs(TimeStamp::Now() - mPlayStartTime + mPlayDuration);
       remainingTime = currentFrame->mEndTime - mStartTime - now;
       currentFrame = nsnull;
     }
 
     // Kick the decode thread in case it filled its buffers and put itself
@@ -1532,19 +1546,20 @@ void nsBuiltinDecoderStateMachine::Wait(
 
 VideoData* nsBuiltinDecoderStateMachine::FindStartTime()
 {
   NS_ASSERTION(IsCurrentThread(mDecoder->mStateMachineThread), "Should be on state machine thread.");
   mDecoder->GetMonitor().AssertCurrentThreadIn();
   PRInt64 startTime = 0;
   mStartTime = 0;
   VideoData* v = nsnull;
+  PRInt64 dataOffset = mInfo.mDataOffset;
   {
     MonitorAutoExit exitMon(mDecoder->GetMonitor());
-    v = mReader->FindStartTime(mReader->GetInfo().mDataOffset, startTime);
+    v = mReader->FindStartTime(dataOffset, startTime);
   }
   if (startTime != 0) {
     mStartTime = startTime;
     if (mGotDurationFromMetaData) {
       NS_ASSERTION(mEndTime != -1,
                    "We should have mEndTime as supplied duration here");
       // We were specified a duration from a Content-Duration HTTP header.
       // Adjust mEndTime so that mEndTime-mStartTime matches the specified
@@ -1608,21 +1623,22 @@ void nsBuiltinDecoderStateMachine::Updat
 void nsBuiltinDecoderStateMachine::LoadMetadata()
 {
   NS_ASSERTION(IsCurrentThread(mDecoder->mStateMachineThread),
                "Should be on state machine thread.");
   mDecoder->GetMonitor().AssertCurrentThreadIn();
 
   LOG(PR_LOG_DEBUG, ("Loading Media Headers"));
   nsresult res;
+  nsVideoInfo info;
   {
     MonitorAutoExit exitMon(mDecoder->GetMonitor());
-    res = mReader->ReadMetadata();
+    res = mReader->ReadMetadata(&info);
   }
-  const nsVideoInfo& info = mReader->GetInfo();
+  mInfo = info;
 
   if (NS_FAILED(res) || (!info.mHasVideo && !info.mHasAudio)) {
     mState = DECODER_STATE_SHUTDOWN;      
     nsCOMPtr<nsIRunnable> event =
       NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::DecodeError);
     NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
     return;
   }
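
LoadMetadata() is where the race actually gets broken. Previously the state machine and audio threads read the reader's mInfo via GetInfo() without holding the reader's monitor that guards it; now the reader copies its mInfo out by value under its own monitor, and the state machine keeps that copy in a separate member guarded by the decoder monitor. A condensed restatement of the hand-off above, for contrast with the old pattern:

    // Old, racy pattern: an unlocked reference into the reader's state.
    //   const nsVideoInfo& info = mReader->GetInfo();

    // New pattern: copy out under the reader's monitor, store under the
    // decoder monitor. Two copies, each guarded by its own lock.
    nsVideoInfo info;                      // stack local, no lock required
    {
      MonitorAutoExit exitMon(mDecoder->GetMonitor());
      res = mReader->ReadMetadata(&info);  // reader copies mInfo out under its monitor
    }
    mInfo = info;                          // decoder monitor is held again here
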
--- a/content/media/nsBuiltinDecoderStateMachine.h
+++ b/content/media/nsBuiltinDecoderStateMachine.h
@@ -179,24 +179,24 @@ public:
   // State machine thread run function. Polls the state, sends frames to be
   // displayed at appropriate times, and generally manages the decode.
   NS_IMETHOD Run();
 
   // This is called on the state machine thread and audio thread.
   // The decoder monitor must be obtained before calling this.
   PRBool HasAudio() const {
     mDecoder->GetMonitor().AssertCurrentThreadIn();
-    return mReader->GetInfo().mHasAudio;
+    return mInfo.mHasAudio;
   }
 
   // This is called on the state machine thread and audio thread.
   // The decoder monitor must be obtained before calling this.
   PRBool HasVideo() const {
     mDecoder->GetMonitor().AssertCurrentThreadIn();
-    return mReader->GetInfo().mHasVideo;
+    return mInfo.mHasVideo;
   }
 
   // Should be called by main thread.
   PRBool HaveNextFrameData() const;
 
   // Must be called with the decode monitor held.
   PRBool IsBuffering() const {
     mDecoder->GetMonitor().AssertCurrentThreadIn();
@@ -306,20 +306,21 @@ protected:
 
   // Update only the state machine's current playback position (and duration,
   // if unknown).  Does not update the playback position on the decoder or
   // media element -- use UpdatePlaybackPosition for that.  Called on the state
   // machine thread, caller must hold the decoder lock.
   void UpdatePlaybackPositionInternal(PRInt64 aTime);
 
   // Performs YCbCr to RGB conversion, and pushes the image down the
-  // rendering pipeline. Called on the state machine thread.
-  void RenderVideoFrame(VideoData* aData,
-                        TimeStamp aTarget);
-
+  // rendering pipeline. Called on the state machine thread. The decoder
+  // monitor must not be held when calling this.
+  void RenderVideoFrame(VideoData* aData, TimeStamp aTarget,
+                        nsIntSize aDisplaySize, float aAspectRatio);
+
   // If we have video, display a video frame if its time for display has
   // arrived, otherwise sleep until it's time for the next sample. Update
   // the current frame time as appropriate, and trigger ready state update.
   // The decoder monitor must be held with exactly one lock count. Called
   // on the state machine thread.
   void AdvanceFrame();
 
   // Pushes up to aSamples samples of silence onto the audio hardware. Returns
@@ -519,11 +520,15 @@ protected:
   // Synchronised via decoder monitor.
   PRPackedBool mQuickBuffering;
 
 private:
   // Manager for queuing and dispatching MozAudioAvailable events.  The
   // event manager is accessed from the state machine and audio threads,
   // and takes care of synchronizing access to its internal queue.
   nsAudioAvailableEventManager mEventManager;
+
+  // Stores presentation info required for playback. The decoder monitor
+  // must be held when accessing this.
+  nsVideoInfo mInfo;
 };
 
 #endif
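
The extra brace scopes added throughout this patch work because MonitorAutoEnter and MonitorAutoExit are RAII helpers: the constructor takes (or releases) the monitor and the destructor undoes it, so a brace pair marks exactly where a lock is, or is not, held. A minimal illustration (SketchWork and its argument are placeholders):

    void SketchWork(mozilla::Monitor& aMonitor)
    {
      mozilla::MonitorAutoEnter mon(aMonitor);    // aMonitor acquired here
      // ... aMonitor is held ...
      {
        mozilla::MonitorAutoExit exit(aMonitor);  // released for this scope
        // ... safe to call code that must not be entered with aMonitor
        //     held, or that needs to take a different monitor ...
      }                                           // re-acquired here
      // ... aMonitor is held again ...
    }                                             // released here
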
--- a/content/media/ogg/nsOggReader.cpp
+++ b/content/media/ogg/nsOggReader.cpp
@@ -164,17 +164,17 @@ static PRBool DoneReadingHeaders(nsTArra
   for (PRUint32 i = 0; i < aBitstreams.Length(); i++) {
     if (!aBitstreams[i]->DoneReadingHeaders()) {
       return PR_FALSE;
     }
   }
   return PR_TRUE;
 }
 
-nsresult nsOggReader::ReadMetadata()
+nsresult nsOggReader::ReadMetadata(nsVideoInfo* aInfo)
 {
   NS_ASSERTION(mDecoder->OnStateMachineThread(), "Should be on play state machine thread.");
   MonitorAutoEnter mon(mMonitor);
 
   // We read packets until all bitstreams have read all their header packets.
   // We record the offset of the first non-header page so that we know
   // what page to seek to when seeking to the media start.
 
@@ -348,16 +348,18 @@ nsresult nsOggReader::ReadMetadata()
     mVorbisSerial = mVorbisState->mSerial;
   }
 
   if (mTheoraState) {
     memcpy(&mTheoraInfo, &mTheoraState->mInfo, sizeof(mTheoraInfo));
     mTheoraSerial = mTheoraState->mSerial;
   }
 
+  *aInfo = mInfo;
+
   LOG(PR_LOG_DEBUG, ("Done loading headers, data offset %lld", mDataOffset));
 
   return NS_OK;
 }
 
 nsresult nsOggReader::DecodeVorbis(nsTArray<nsAutoPtr<SoundData> >& aChunks,
                                    ogg_packet* aPacket)
 {
@@ -556,16 +558,20 @@ nsresult nsOggReader::DecodeTheora(nsTAr
     PRBool isKeyframe = th_packet_iskeyframe(aPacket) == 1;
     VideoData::YCbCrBuffer b;
     for (PRUint32 i=0; i < 3; ++i) {
       b.mPlanes[i].mData = buffer[i].data;
       b.mPlanes[i].mHeight = buffer[i].height;
       b.mPlanes[i].mWidth = buffer[i].width;
       b.mPlanes[i].mStride = buffer[i].stride;
     }
+
+    // Need the monitor to be held to be able to use mInfo. This
+    // is held by our caller.
+    mMonitor.AssertCurrentThreadIn();
     VideoData *v = VideoData::Create(mInfo,
                                      mDecoder->GetImageContainer(),
                                      mPageOffset,
                                      time,
                                      endTime,
                                      b,
                                      isKeyframe,
                                      aPacket->granulepos);
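
DecodeTheora() now reads mInfo directly, so rather than entering the monitor itself it asserts that its caller already holds it; the matching requirement is documented in the header change below. A rough sketch of a conforming caller, written as if it were a member of nsOggReader and shown only to illustrate the contract the real callers in this file follow:

    // Hypothetical caller, not part of this patch.
    PRBool SketchDecodeFrame(ogg_packet* aPacket)
    {
      nsTArray<nsAutoPtr<VideoData> > frames;
      MonitorAutoEnter mon(mMonitor);               // take the reader's monitor first...
      nsresult rv = DecodeTheora(frames, aPacket);  // ...so its reads of mInfo are protected
      return NS_SUCCEEDED(rv);
    }
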
--- a/content/media/ogg/nsOggReader.h
+++ b/content/media/ogg/nsOggReader.h
@@ -85,17 +85,17 @@ public:
   }
 
   virtual PRBool HasVideo()
   {
     mozilla::MonitorAutoEnter mon(mMonitor);
     return mTheoraState != 0 && mTheoraState->mActive;
   }
 
-  virtual nsresult ReadMetadata();
+  virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
   virtual nsresult Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime);
   virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
 
 private:
 
   PRBool HasSkeleton()
   {
     MonitorAutoEnter mon(mMonitor);
@@ -189,17 +189,18 @@ private:
                       PRBool aCachedDataOnly,
                       ogg_sync_state* aState);
 
   // Decodes one packet of Vorbis data, storing the resulting chunks of
   // PCM samples in aChunks.
   nsresult DecodeVorbis(nsTArray<nsAutoPtr<SoundData> >& aChunks,
                         ogg_packet* aPacket);
 
-  // May return NS_ERROR_OUT_OF_MEMORY.
+  // May return NS_ERROR_OUT_OF_MEMORY. Caller must have obtained the
+  // reader's monitor.
   nsresult DecodeTheora(nsTArray<nsAutoPtr<VideoData> >& aFrames,
                         ogg_packet* aPacket);
 
   // Read a page of data from the Ogg file. Returns the offset of the start
   // of the page, or -1 if the page read failed.
   PRInt64 ReadOggPage(ogg_page* aPage);
 
   // Read a packet for an Ogg bitstream/codec state. Returns PR_TRUE on
--- a/content/media/raw/nsRawReader.cpp
+++ b/content/media/raw/nsRawReader.cpp
@@ -63,17 +63,17 @@ nsresult nsRawReader::Init(nsBuiltinDeco
 }
 
 nsresult nsRawReader::ResetDecode()
 {
   mCurrentFrame = 0;
   return nsBuiltinDecoderReader::ResetDecode();
 }
 
-nsresult nsRawReader::ReadMetadata()
+nsresult nsRawReader::ReadMetadata(nsVideoInfo* aInfo)
 {
   NS_ASSERTION(mDecoder->OnStateMachineThread(),
                "Should be on state machine thread.");
   mozilla::MonitorAutoEnter autoEnter(mMonitor);
 
   nsMediaStream* stream = mDecoder->GetCurrentStream();
   NS_ASSERTION(stream, "Decoder has no media stream");
 
@@ -129,16 +129,18 @@ nsresult nsRawReader::ReadMetadata()
   if (length != -1) {
     mozilla::MonitorAutoExit autoExitMonitor(mMonitor);
     mozilla::MonitorAutoEnter autoMonitor(mDecoder->GetMonitor());
     mDecoder->GetStateMachine()->SetDuration(1000 *
                                            (length - sizeof(nsRawVideoHeader)) /
                                            (mFrameSize * mFrameRate));
   }
 
+  *aInfo = mInfo;
+
   return NS_OK;
 }
 
  PRBool nsRawReader::DecodeAudioData()
 {
   NS_ASSERTION(mDecoder->OnStateMachineThread() || mDecoder->OnDecodeThread(),
                "Should be on state machine thread or decode thread.");
   return PR_FALSE;
--- a/content/media/raw/nsRawReader.h
+++ b/content/media/raw/nsRawReader.h
@@ -105,17 +105,17 @@ public:
     return PR_FALSE;
   }
 
   virtual PRBool HasVideo()
   {
     return PR_TRUE;
   }
 
-  virtual nsresult ReadMetadata();
+  virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
   virtual nsresult Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime);
   virtual PRInt64 FindEndTime(PRInt64 aEndOffset);
   virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
 
 private:
   PRBool ReadFromStream(nsMediaStream *aStream, PRUint8 *aBuf,
                         PRUint32 aLength);
 
--- a/content/media/webm/nsWebMReader.cpp
+++ b/content/media/webm/nsWebMReader.cpp
@@ -204,17 +204,17 @@ nsresult nsWebMReader::ResetDecode()
 void nsWebMReader::Cleanup()
 {
   if (mContext) {
     nestegg_destroy(mContext);
     mContext = nsnull;
   }
 }
 
-nsresult nsWebMReader::ReadMetadata()
+nsresult nsWebMReader::ReadMetadata(nsVideoInfo* aInfo)
 {
   NS_ASSERTION(mDecoder->OnStateMachineThread(), "Should be on state machine thread.");
   MonitorAutoEnter mon(mMonitor);
 
   nestegg_io io;
   io.read = webm_read;
   io.seek = webm_seek;
   io.tell = webm_tell;
@@ -391,16 +391,18 @@ nsresult nsWebMReader::ReadMetadata()
       }
 
       mInfo.mAudioRate = mVorbisDsp.vi->rate;
       mInfo.mAudioChannels = mVorbisDsp.vi->channels;
       mChannels = mInfo.mAudioChannels;
     }
   }
 
+  *aInfo = mInfo;
+
   return NS_OK;
 }
 
 ogg_packet nsWebMReader::InitOggPacket(unsigned char* aData,
                                        size_t aLength,
                                        PRBool aBOS,
                                        PRBool aEOS,
                                        PRInt64 aGranulepos)
--- a/content/media/webm/nsWebMReader.h
+++ b/content/media/webm/nsWebMReader.h
@@ -148,17 +148,17 @@ public:
   }
 
   virtual PRBool HasVideo()
   {
     mozilla::MonitorAutoEnter mon(mMonitor);
     return mHasVideo;
   }
 
-  virtual nsresult ReadMetadata();
+  virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
   virtual nsresult Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime);
   virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
   virtual void NotifyDataArrived(const char* aBuffer, PRUint32 aLength, PRUint32 aOffset);
 
 private:
   // Value passed to NextPacket to determine if we are reading a video or an
   // audio packet.
   enum TrackType {