Bug 474748 - Defer YUV conversion until frame paint, and allow SetRGBData to assume ownership of the RGB buffer rather than copying it. r=chris.double, sr=roc
author: Matthew Gregan <kinetik@flim.org>
Tue, 17 Mar 2009 16:45:00 +1300
changeset 26338 3de6334ba553ecbe4a86053d661eca205e564f84
parent 26337 fd0eac910c255005c8bf88d40f030712d5bf5a24
child 26339 0ef8d47d12610e84e86c5395d7ed21fc930585e9
push id: 6019
push user: rocallahan@mozilla.com
push date: Thu, 19 Mar 2009 08:05:16 +0000
treeherder: mozilla-central@a8880afe4027 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: chris.double, roc
bugs: 474748
milestone: 1.9.2a1pre
Bug 474748 - Defer YUV conversion until frame paint, and allow SetRGBData to assume ownership of the RGB buffer rather than copying it. r=chris.double, sr=roc
content/media/video/public/nsMediaDecoder.h
content/media/video/src/nsMediaDecoder.cpp
content/media/video/src/nsOggDecoder.cpp
--- a/content/media/video/public/nsMediaDecoder.h
+++ b/content/media/video/public/nsMediaDecoder.h
@@ -241,25 +241,22 @@ class nsMediaDecoder : public nsIObserve
 protected:
 
   // Start timer to update download progress information.
   nsresult StartProgress();
 
   // Stop progress information timer.
   nsresult StopProgress();
 
-  // Set the RGB width, height and framerate. The passed RGB buffer is
-  // copied to the mRGB buffer. This also allocates the mRGB buffer if
-  // needed.
-  // This is the only nsMediaDecoder method that may be called 
-  // from threads other than the main thread.
-  // It must be called with the mVideoUpdateLock held.
-  void SetRGBData(PRInt32 aWidth, 
-                  PRInt32 aHeight, 
-                  float aFramerate, 
+  // Set the RGB width, height and framerate. Ownership of the passed RGB
+  // buffer is transferred to the decoder.  This is the only nsMediaDecoder
+  // method that may be called from threads other than the main thread.
+  void SetRGBData(PRInt32 aWidth,
+                  PRInt32 aHeight,
+                  float aFramerate,
                   unsigned char* aRGBBuffer);
 
   /**
    * This class is useful for estimating rates of data passing through
    * some channel. The idea is that activity on the channel "starts"
    * and "stops" over time. At certain times data passes through the
    * channel (usually while the channel is active; data passing through
    * an inactive channel is ignored). The GetRate() function computes
--- a/content/media/video/src/nsMediaDecoder.cpp
+++ b/content/media/video/src/nsMediaDecoder.cpp
@@ -195,35 +195,25 @@ nsresult nsMediaDecoder::StopProgress()
   nsresult rv = mProgressTimer->Cancel();
   mProgressTimer = nsnull;
 
   return rv;
 }
 
 void nsMediaDecoder::SetRGBData(PRInt32 aWidth, PRInt32 aHeight, float aFramerate, unsigned char* aRGBBuffer)
 {
+  nsAutoLock lock(mVideoUpdateLock);
+
   if (mRGBWidth != aWidth || mRGBHeight != aHeight) {
     mRGBWidth = aWidth;
     mRGBHeight = aHeight;
     mSizeChanged = PR_TRUE;
-    // Delete buffer so we'll reallocate it
-    mRGB = nsnull;
   }
   mFramerate = aFramerate;
-
-  if (aRGBBuffer) {
-    if (!mRGB) {
-      mRGB = new unsigned char[aWidth * aHeight * 4];
-    }
-    if (mRGB) {
-      memcpy(mRGB.get(), aRGBBuffer, aWidth*aHeight*4);
-    }
-  } else {
-    mRGB = nsnull;
-  }
+  mRGB = aRGBBuffer;
 }
 
 void nsMediaDecoder::Paint(gfxContext* aContext, const gfxRect& aRect)
 {
   nsAutoLock lock(mVideoUpdateLock);
 
   if (!mRGB)
     return;
--- a/content/media/video/src/nsOggDecoder.cpp
+++ b/content/media/video/src/nsOggDecoder.cpp
@@ -123,45 +123,61 @@
 */
 class nsOggDecodeStateMachine : public nsRunnable
 {
 public:
   // Object to hold the decoded data from a frame
   class FrameData {
   public:
     FrameData() :
+      mVideoHeader(nsnull),
       mVideoWidth(0),
       mVideoHeight(0),
+      mUVWidth(0),
+      mUVHeight(0),
       mDecodedFrameTime(0.0),
       mTime(0.0)
     {
       MOZ_COUNT_CTOR(FrameData);
     }
 
     ~FrameData()
     {
       MOZ_COUNT_DTOR(FrameData);
+
+      if (mVideoHeader) {
+        oggplay_callback_info_unlock_item(mVideoHeader);
+      }
     }
 
     // Write the audio data from the frame to the Audio stream.
     void Write(nsAudioStream* aStream)
     {
       PRUint32 length = mAudioData.Length();
       if (length == 0)
         return;
 
       aStream->Write(mAudioData.Elements(), length);
     }
 
+    void SetVideoHeader(OggPlayDataHeader* aVideoHeader)
+    {
+      NS_ABORT_IF_FALSE(!mVideoHeader, "Frame already owns a video header");
+      mVideoHeader = aVideoHeader;
+      oggplay_callback_info_lock_item(mVideoHeader);
+    }
+
     // The position in the stream where this frame ended, in bytes
     PRInt64 mEndStreamPosition;
-    nsAutoArrayPtr<unsigned char> mVideoData;
+    OggPlayDataHeader* mVideoHeader;
     nsTArray<float> mAudioData;
     int mVideoWidth;
     int mVideoHeight;
+    int mUVWidth;
+    int mUVHeight;
     float mDecodedFrameTime;
     float mTime;
     OggPlayStreamInfo mState;
   };
 
   // A queue of decoded video frames. 
   class FrameQueue
   {
@@ -318,17 +334,17 @@ public:
     return mState == nsOggDecodeStateMachine::DECODER_STATE_BUFFERING;
   }
 
 protected:
   // Convert the OggPlay frame information into a format used by Gecko
   // (RGB for video, float for sound, etc).The decoder monitor must be
   // acquired in the scope of calls to these functions. They must be
   // called only when the current state > DECODING_METADATA.
-  void HandleVideoData(FrameData* aFrame, int aTrackNum, OggPlayVideoData* aVideoData);
+  void HandleVideoData(FrameData* aFrame, int aTrackNum, OggPlayDataHeader* aVideoHeader);
   void HandleAudioData(FrameData* aFrame, OggPlayAudioData* aAudioData, int aSize);
 
   // These methods can only be called on the decoding thread.
   void LoadOggHeaders(nsChannelReader* aReader);
 
   // Initializes and opens the audio stream. Called from the decode
   // thread only. Must be called with the decode monitor held.
   void OpenAudioStream();
@@ -558,17 +574,17 @@ nsOggDecodeStateMachine::FrameData* nsOg
   float audioTime = 0.0;
   float videoTime = 0.0;
 
   if (mVideoTrack != -1 &&
       num_tracks > mVideoTrack &&
       oggplay_callback_info_get_type(info[mVideoTrack]) == OGGPLAY_YUV_VIDEO) {
     OggPlayDataHeader** headers = oggplay_callback_info_get_headers(info[mVideoTrack]);
     videoTime = ((float)oggplay_callback_info_get_presentation_time(headers[0]))/1000.0;
-    HandleVideoData(frame, mVideoTrack, oggplay_callback_info_get_video_data(headers[0]));
+    HandleVideoData(frame, mVideoTrack, headers[0]);
   }
 
   if (mAudioTrack != -1 &&
       num_tracks > mAudioTrack &&
       oggplay_callback_info_get_type(info[mAudioTrack]) == OGGPLAY_FLOATS_AUDIO) {
     OggPlayDataHeader** headers = oggplay_callback_info_get_headers(info[mAudioTrack]);
     audioTime = ((float)oggplay_callback_info_get_presentation_time(headers[0]))/1000.0;
     int required = oggplay_callback_info_get_required(info[mAudioTrack]);
@@ -589,58 +605,36 @@ nsOggDecodeStateMachine::FrameData* nsOg
     frame->mState = OGGPLAY_STREAM_UNINITIALISED;
 
   frame->mDecodedFrameTime = mVideoTrack == -1 ? audioTime : videoTime;
 
   oggplay_buffer_release(mPlayer, info);
   return frame;
 }
 
-void nsOggDecodeStateMachine::HandleVideoData(FrameData* aFrame, int aTrackNum, OggPlayVideoData* aVideoData) {
-  if (!aVideoData)
+void nsOggDecodeStateMachine::HandleVideoData(FrameData* aFrame, int aTrackNum, OggPlayDataHeader* aVideoHeader) {
+  if (!aVideoHeader)
     return;
 
   int y_width;
   int y_height;
   oggplay_get_video_y_size(mPlayer, aTrackNum, &y_width, &y_height);
   int uv_width;
   int uv_height;
   oggplay_get_video_uv_size(mPlayer, aTrackNum, &uv_width, &uv_height);
 
   if (y_width >= MAX_VIDEO_WIDTH || y_height >= MAX_VIDEO_HEIGHT) {
     return;
   }
 
   aFrame->mVideoWidth = y_width;
   aFrame->mVideoHeight = y_height;
-  aFrame->mVideoData = new unsigned char[y_width * y_height * 4];
-  if (!aFrame->mVideoData) {
-    return;
-  }
-
-  OggPlayYUVChannels yuv;
-  OggPlayRGBChannels rgb;
-      
-  yuv.ptry = aVideoData->y;
-  yuv.ptru = aVideoData->u;
-  yuv.ptrv = aVideoData->v;
-  yuv.uv_width = uv_width;
-  yuv.uv_height = uv_height;
-  yuv.y_width = y_width;
-  yuv.y_height = y_height;
-      
-  rgb.ptro = aFrame->mVideoData;
-  rgb.rgb_width = aFrame->mVideoWidth;
-  rgb.rgb_height = aFrame->mVideoHeight;
-
-#ifdef IS_BIG_ENDIAN
-  oggplay_yuv2argb(&yuv, &rgb);
-#else
-  oggplay_yuv2bgr(&yuv, &rgb);
-#endif
+  aFrame->mUVWidth = uv_width;
+  aFrame->mUVHeight = uv_height;
+  aFrame->SetVideoHeader(aVideoHeader);
 }
 
 void nsOggDecodeStateMachine::HandleAudioData(FrameData* aFrame, OggPlayAudioData* aAudioData, int aSize) {
   // 'aSize' is number of samples. Multiply by number of channels to
   // get the actual number of floats being sent.
   int size = aSize * mAudioChannels;
 
   aFrame->mAudioData.AppendElements(reinterpret_cast<float*>(aAudioData), size);
@@ -714,22 +708,45 @@ void nsOggDecodeStateMachine::PlayFrame(
       }
     }
   }
 }
 
 void nsOggDecodeStateMachine::PlayVideo(FrameData* aFrame)
 {
   //  NS_ASSERTION(PR_InMonitor(mDecoder->GetMonitor()), "PlayVideo() called without acquiring decoder monitor");
-  if (aFrame) {
-    if (aFrame->mVideoData) {
-      nsAutoLock lock(mDecoder->mVideoUpdateLock);
+  if (aFrame && aFrame->mVideoHeader) {
+    OggPlayVideoData* videoData = oggplay_callback_info_get_video_data(aFrame->mVideoHeader);
+
+    OggPlayYUVChannels yuv;
+    yuv.ptry = videoData->y;
+    yuv.ptru = videoData->u;
+    yuv.ptrv = videoData->v;
+    yuv.uv_width = aFrame->mUVWidth;
+    yuv.uv_height = aFrame->mUVHeight;
+    yuv.y_width = aFrame->mVideoWidth;
+    yuv.y_height = aFrame->mVideoHeight;
 
-      mDecoder->SetRGBData(aFrame->mVideoWidth, aFrame->mVideoHeight, mFramerate, aFrame->mVideoData);
-    }
+    size_t size = aFrame->mVideoWidth * aFrame->mVideoHeight * 4;
+    nsAutoArrayPtr<unsigned char> buffer(new unsigned char[size]);
+    if (!buffer)
+      return;
+
+    OggPlayRGBChannels rgb;
+    rgb.ptro = buffer;
+    rgb.rgb_width = aFrame->mVideoWidth;
+    rgb.rgb_height = aFrame->mVideoHeight;
+
+#ifdef IS_BIG_ENDIAN
+    oggplay_yuv2argb(&yuv, &rgb);
+#else
+    oggplay_yuv2bgr(&yuv, &rgb);
+#endif
+
+    mDecoder->SetRGBData(aFrame->mVideoWidth, aFrame->mVideoHeight, mFramerate, buffer.forget());
   }
 }
 
 void nsOggDecodeStateMachine::PlayAudio(FrameData* aFrame)
 {
   //  NS_ASSERTION(PR_InMonitor(mDecoder->GetMonitor()), "PlayAudio() called without acquiring decoder monitor");
   if (!mAudioStream)
     return;
@@ -1181,20 +1198,17 @@ void nsOggDecodeStateMachine::LoadOggHea
         oggplay_get_video_fps(mPlayer, i, &fpsd, &fpsn);
         mFramerate = fpsd == 0 ? 0.0 : float(fpsn)/float(fpsd);
         mCallbackPeriod = 1.0 / mFramerate;
         LOG(PR_LOG_DEBUG, ("Frame rate: %f", mFramerate));
 
         int y_width;
         int y_height;
         oggplay_get_video_y_size(mPlayer, i, &y_width, &y_height);
-        {
-          nsAutoLock lock(mDecoder->mVideoUpdateLock);
-          mDecoder->SetRGBData(y_width, y_height, mFramerate, nsnull);
-        }
+        mDecoder->SetRGBData(y_width, y_height, mFramerate, nsnull);
       }
       else if (mAudioTrack == -1 && oggplay_get_track_type(mPlayer, i) == OGGZ_CONTENT_VORBIS) {
         mAudioTrack = i;
         oggplay_set_offset(mPlayer, i, OGGPLAY_AUDIO_OFFSET);
         oggplay_get_audio_samplerate(mPlayer, i, &mAudioRate);
         oggplay_get_audio_channels(mPlayer, i, &mAudioChannels);
         LOG(PR_LOG_DEBUG, ("samplerate: %d, channels: %d", mAudioRate, mAudioChannels));
       }