Bug 422540 - GStreamer backend for audio/video decoding. r=cdouble, a=npotb
authorAlessandro Decina <alessandro.d@gmail.com>
Wed, 18 Apr 2012 18:33:13 -0400
changeset 95248 c04a467c48ac5c15d16da548f24203ef55df670e
parent 95247 364a452825725e96dc5e9d3171b5a6ff63b2275e
child 95249 9d11d469890f3bda809f7590d756587e190a0921
push id886
push userlsblakk@mozilla.com
push dateMon, 04 Jun 2012 19:57:52 +0000
treeherdermozilla-beta@bbd8d5efd6d1 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewerscdouble, npotb
bugs422540
milestone14.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 422540 - GStreamer backend for audio/video decoding. r=cdouble, a=npotb
config/autoconf.mk.in
config/system-headers
configure.in
content/html/content/public/nsHTMLMediaElement.h
content/html/content/src/nsHTMLMediaElement.cpp
content/media/Makefile.in
content/media/gstreamer/Makefile.in
content/media/gstreamer/nsGStreamerDecoder.cpp
content/media/gstreamer/nsGStreamerDecoder.h
content/media/gstreamer/nsGStreamerReader.cpp
content/media/gstreamer/nsGStreamerReader.h
content/media/nsBuiltinDecoderReader.h
content/media/nsBuiltinDecoderStateMachine.cpp
js/src/config/system-headers
layout/build/Makefile.in
modules/libpref/src/init/all.js
toolkit/library/Makefile.in
--- a/config/autoconf.mk.in
+++ b/config/autoconf.mk.in
@@ -166,16 +166,17 @@ MOZ_RAW = @MOZ_RAW@
 MOZ_SYDNEYAUDIO = @MOZ_SYDNEYAUDIO@
 MOZ_CUBEB = @MOZ_CUBEB@
 MOZ_WAVE = @MOZ_WAVE@
 MOZ_MEDIA = @MOZ_MEDIA@
 MOZ_VORBIS = @MOZ_VORBIS@
 MOZ_TREMOR = @MOZ_TREMOR@
 MOZ_NO_THEORA_ASM = @MOZ_NO_THEORA_ASM@
 MOZ_WEBM = @MOZ_WEBM@
+MOZ_GSTREAMER = @MOZ_GSTREAMER@
 MOZ_VP8_ERROR_CONCEALMENT = @MOZ_VP8_ERROR_CONCEALMENT@
 MOZ_VP8_ENCODER = @MOZ_VP8_ENCODER@
 VPX_AS = @VPX_AS@
 VPX_ASFLAGS = @VPX_ASFLAGS@
 VPX_DASH_C_FLAG = @VPX_DASH_C_FLAG@
 VPX_AS_CONVERSION = @VPX_AS_CONVERSION@
 VPX_ASM_SUFFIX = @VPX_ASM_SUFFIX@
 VPX_X86_ASM = @VPX_X86_ASM@
@@ -657,16 +658,19 @@ MOZ_PLATFORM_MAEMO = @MOZ_PLATFORM_MAEMO
 MOZ_PLATFORM_MAEMO_CFLAGS	= @MOZ_PLATFORM_MAEMO_CFLAGS@
 MOZ_PLATFORM_MAEMO_LIBS 	= @MOZ_PLATFORM_MAEMO_LIBS@
 MOZ_MAEMO_LIBLOCATION 	= @MOZ_MAEMO_LIBLOCATION@
 
 MOZ_ENABLE_LIBCONIC = @MOZ_ENABLE_LIBCONIC@
 LIBCONIC_CFLAGS     = @LIBCONIC_CFLAGS@
 LIBCONIC_LIBS       = @LIBCONIC_LIBS@
 
+GSTREAMER_CFLAGS = @GSTREAMER_CFLAGS@
+GSTREAMER_LIBS   = @GSTREAMER_LIBS@
+
 MACOS_SDK_DIR	= @MACOS_SDK_DIR@
 NEXT_ROOT	= @NEXT_ROOT@
 GCC_VERSION	= @GCC_VERSION@
 UNIVERSAL_BINARY= @UNIVERSAL_BINARY@
 MOZ_CAN_RUN_PROGRAMS = @MOZ_CAN_RUN_PROGRAMS@
 HAVE_DTRACE= @HAVE_DTRACE@
 
 VISIBILITY_FLAGS = @VISIBILITY_FLAGS@
--- a/config/system-headers
+++ b/config/system-headers
@@ -1050,8 +1050,12 @@ sydneyaudio/sydney_audio.h
 vorbis/codec.h
 theora/theoradec.h
 tremor/ivorbiscodec.h
 ogg/ogg.h
 ogg/os_types.h
 nestegg/nestegg.h
 cubeb/cubeb.h
 #endif
+gst/gst.h
+gst/app/gstappsink.h
+gst/app/gstappsrc.h
+gst/video/video.h
--- a/configure.in
+++ b/configure.in
@@ -5790,16 +5790,59 @@ linux*)
       PKG_CHECK_MODULES(MOZ_ALSA, alsa, ,
          [echo "$MOZ_ALSA_PKG_ERRORS"
           AC_MSG_ERROR([Need alsa for Ogg, Wave or WebM decoding on Linux.  Disable with --disable-ogg --disable-wave --disable-webm.  (On Ubuntu, you might try installing the package libasound2-dev.)])])
       ;;
    esac
 fi
 
 dnl ========================================================
+dnl = Enable GStreamer
+dnl ========================================================
+MOZ_ARG_ENABLE_BOOL(gstreamer,
+[  --enable-gstreamer           Enable GStreamer support],
+MOZ_GSTREAMER=1,
+MOZ_GSTREAMER=)
+
+if test "$MOZ_GSTREAMER"; then
+    # API version, eg 0.10, 1.0 etc
+    GST_API_VERSION=0.10
+    # core/base release number
+    # depend on >= 0.10.33 as that's when the playbin2 source-setup signal was
+    # introduced
+    GST_VERSION=0.10.33
+    PKG_CHECK_MODULES(GSTREAMER,
+                      gstreamer-$GST_API_VERSION >= $GST_VERSION
+                      gstreamer-app-$GST_API_VERSION
+                      gstreamer-plugins-base-$GST_API_VERSION)
+    if test -n "$GSTREAMER_LIBS"; then
+       _SAVE_LDFLAGS=$LDFLAGS
+       LDFLAGS="$LDFLAGS -lgstvideo-$GST_API_VERSION"
+       AC_TRY_LINK(,[return 0;],_HAVE_LIBGSTVIDEO=1,_HAVE_LIBGSTVIDEO=)
+       if test -n "$_HAVE_LIBGSTVIDEO" ; then
+          GSTREAMER_LIBS="$GSTREAMER_LIBS -lgstvideo-$GST_API_VERSION"
+       else
+          AC_MSG_ERROR([gstreamer video backend requires libgstvideo])
+       fi
+       LDFLAGS=$_SAVE_LDFLAGS
+    else
+       AC_MSG_ERROR([gstreamer backend requires the gstreamer packages])
+    fi
+fi
+AC_SUBST(GSTREAMER_CFLAGS)
+AC_SUBST(GSTREAMER_LIBS)
+AC_SUBST(MOZ_GSTREAMER)
+
+if test -n "$MOZ_GSTREAMER"; then
+   AC_DEFINE(MOZ_GSTREAMER)
+   MOZ_MEDIA=1
+fi
+
+
+dnl ========================================================
 dnl Permissions System
 dnl ========================================================
 MOZ_ARG_DISABLE_BOOL(permissions,
 [  --disable-permissions   Disable permissions (popup and cookie blocking)],
     MOZ_PERMISSIONS=,
     MOZ_PERMISSIONS=1
 )
 
--- a/content/html/content/public/nsHTMLMediaElement.h
+++ b/content/html/content/public/nsHTMLMediaElement.h
@@ -297,16 +297,23 @@ public:
 
 #ifdef MOZ_WEBM
   static bool IsWebMEnabled();
   static bool IsWebMType(const nsACString& aType);
   static const char gWebMTypes[2][17];
   static char const *const gWebMCodecs[4];
 #endif
 
+#ifdef MOZ_GSTREAMER
+  static bool IsH264Enabled();
+  static bool IsH264Type(const nsACString& aType);
+  static const char gH264Types[3][17];
+  static char const *const gH264Codecs[6];
+#endif
+
   /**
    * Called when a child source element is added to this media element. This
    * may queue a task to run the select resource algorithm if appropriate.
    */
   void NotifyAddedSource();
 
   /**
    * Called when there's been an error fetching the resource. This decides
--- a/content/html/content/src/nsHTMLMediaElement.cpp
+++ b/content/html/content/src/nsHTMLMediaElement.cpp
@@ -101,16 +101,19 @@
 #include "nsWaveDecoder.h"
 #endif
 #ifdef MOZ_WEBM
 #include "nsWebMDecoder.h"
 #endif
 #ifdef MOZ_RAW
 #include "nsRawDecoder.h"
 #endif
+#ifdef MOZ_GSTREAMER
+#include "nsGStreamerDecoder.h"
+#endif
 
 #ifdef PR_LOGGING
 static PRLogModuleInfo* gMediaElementLog;
 static PRLogModuleInfo* gMediaElementEventsLog;
 #define LOG(type, msg) PR_LOG(gMediaElementLog, type, msg)
 #define LOG_EVENT(type, msg) PR_LOG(gMediaElementEventsLog, type, msg)
 #else
 #define LOG(type, msg)
@@ -1841,16 +1844,55 @@ nsHTMLMediaElement::IsWebMType(const nsA
       return true;
     }
   }
 
   return false;
 }
 #endif
 
+#ifdef MOZ_GSTREAMER
+const char nsHTMLMediaElement::gH264Types[3][17] = {
+  "video/mp4",
+  "video/3gpp",
+  "video/quicktime",
+};
+
+char const *const nsHTMLMediaElement::gH264Codecs[6] = {
+  "avc1.42E01E",
+  "avc1.58A01E",
+  "avc1.4D401E",
+  "avc1.64001E",
+  "mp4a.40.2",
+  nsnull
+};
+
+bool
+nsHTMLMediaElement::IsH264Enabled()
+{
+  return Preferences::GetBool("media.h264.enabled");
+}
+
+bool
+nsHTMLMediaElement::IsH264Type(const nsACString& aType)
+{
+  if (!IsH264Enabled()) {
+    return false;
+  }
+
+  for (PRUint32 i = 0; i < ArrayLength(gH264Types); ++i) {
+    if (aType.EqualsASCII(gH264Types[i])) {
+      return true;
+    }
+  }
+
+  return false;
+}
+#endif
+
 /* static */
 nsHTMLMediaElement::CanPlayStatus 
 nsHTMLMediaElement::CanHandleMediaType(const char* aMIMEType,
                                        char const *const ** aCodecList)
 {
 #ifdef MOZ_RAW
   if (IsRawType(nsDependentCString(aMIMEType))) {
     *aCodecList = gRawCodecs;
@@ -1870,16 +1912,23 @@ nsHTMLMediaElement::CanHandleMediaType(c
   }
 #endif
 #ifdef MOZ_WEBM
   if (IsWebMType(nsDependentCString(aMIMEType))) {
     *aCodecList = gWebMCodecs;
     return CANPLAY_YES;
   }
 #endif
+
+#ifdef MOZ_GSTREAMER
+  if (IsH264Type(nsDependentCString(aMIMEType))) {
+    *aCodecList = gH264Codecs;
+    return CANPLAY_YES;
+  }
+#endif
   return CANPLAY_NO;
 }
 
 /* static */
 bool nsHTMLMediaElement::ShouldHandleMediaType(const char* aMIMEType)
 {
 #ifdef MOZ_RAW
   if (IsRawType(nsDependentCString(aMIMEType)))
@@ -1982,33 +2031,50 @@ nsHTMLMediaElement::CreateDecoder(const 
     nsRefPtr<nsRawDecoder> decoder = new nsRawDecoder();
     if (decoder->Init(this)) {
       return decoder.forget();
     }
   }
 #endif
 #ifdef MOZ_OGG
   if (IsOggType(aType)) {
+#ifdef MOZ_GSTREAMER
+    nsRefPtr<nsGStreamerDecoder> decoder = new nsGStreamerDecoder();
+#else
     nsRefPtr<nsOggDecoder> decoder = new nsOggDecoder();
+#endif
     if (decoder->Init(this)) {
       return decoder.forget();
     }
   }
 #endif
 #ifdef MOZ_WAVE
   if (IsWaveType(aType)) {
     nsRefPtr<nsWaveDecoder> decoder = new nsWaveDecoder();
     if (decoder->Init(this)) {
       return decoder.forget();
     }
   }
 #endif
 #ifdef MOZ_WEBM
   if (IsWebMType(aType)) {
+#ifdef MOZ_GSTREAMER
+    nsRefPtr<nsGStreamerDecoder> decoder = new nsGStreamerDecoder();
+#else
     nsRefPtr<nsWebMDecoder> decoder = new nsWebMDecoder();
+#endif
+    if (decoder->Init(this)) {
+      return decoder.forget();
+    }
+  }
+#endif
+
+#ifdef MOZ_GSTREAMER
+  if (IsH264Type(aType)) {
+    nsRefPtr<nsGStreamerDecoder> decoder = new nsGStreamerDecoder();
     if (decoder->Init(this)) {
       return decoder.forget();
     }
   }
 #endif
   return nsnull;
 }
 
--- a/content/media/Makefile.in
+++ b/content/media/Makefile.in
@@ -91,16 +91,20 @@ endif
 ifdef MOZ_WAVE
 PARALLEL_DIRS += wave
 endif
 
 ifdef MOZ_WEBM
 PARALLEL_DIRS += webm
 endif
 
+ifdef MOZ_GSTREAMER
+PARALLEL_DIRS += gstreamer
+endif
+
 ifdef ENABLE_TESTS
 PARALLEL_DIRS += test
 endif
 
 FORCE_STATIC_LIB = 1
 
 include $(topsrcdir)/config/config.mk
 include $(topsrcdir)/ipc/chromium/chromium-config.mk
new file mode 100644
--- /dev/null
+++ b/content/media/gstreamer/Makefile.in
@@ -0,0 +1,38 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DEPTH		= ../../..
+topsrcdir	= @top_srcdir@
+srcdir		= @srcdir@
+VPATH		= @srcdir@
+
+include $(DEPTH)/config/autoconf.mk
+
+MODULE		= content
+LIBRARY_NAME	= gkcongstreamer_s
+LIBXUL_LIBRARY 	= 1
+
+
+EXPORTS		+= \
+		nsGStreamerDecoder.h \
+		$(NULL)
+
+CPPSRCS		= \
+		nsGStreamerReader.cpp \
+		nsGStreamerDecoder.cpp \
+		$(NULL)
+
+FORCE_STATIC_LIB = 1
+
+include $(topsrcdir)/config/rules.mk
+
+CFLAGS		+= $(GSTREAMER_CFLAGS)
+CXXFLAGS	+= $(GSTREAMER_CFLAGS)
+
+
+INCLUDES	+= \
+		-I$(srcdir)/../../base/src \
+		-I$(srcdir)/../../html/content/src \
+		$(NULL)
+
new file mode 100644
--- /dev/null
+++ b/content/media/gstreamer/nsGStreamerDecoder.cpp
@@ -0,0 +1,14 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "nsBuiltinDecoderStateMachine.h"
+#include "nsGStreamerReader.h"
+#include "nsGStreamerDecoder.h"
+
+nsDecoderStateMachine* nsGStreamerDecoder::CreateStateMachine()
+{
+  return new nsBuiltinDecoderStateMachine(this, new nsGStreamerReader(this));
+}
new file mode 100644
--- /dev/null
+++ b/content/media/gstreamer/nsGStreamerDecoder.h
@@ -0,0 +1,19 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(nsGStreamerDecoder_h_)
+#define nsGStreamerDecoder_h_
+
+#include "nsBuiltinDecoder.h"
+
+class nsGStreamerDecoder : public nsBuiltinDecoder
+{
+public:
+  virtual nsMediaDecoder* Clone() { return new nsGStreamerDecoder(); }
+  virtual nsDecoderStateMachine* CreateStateMachine();
+};
+
+#endif
new file mode 100644
--- /dev/null
+++ b/content/media/gstreamer/nsGStreamerReader.cpp
@@ -0,0 +1,815 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "nsError.h"
+#include "nsBuiltinDecoderStateMachine.h"
+#include "nsBuiltinDecoder.h"
+#include "MediaResource.h"
+#include "nsGStreamerReader.h"
+#include "VideoUtils.h"
+#include "nsTimeRanges.h"
+#include "mozilla/Preferences.h"
+
+using namespace mozilla;
+using namespace mozilla::layers;
+
+// Un-comment to enable logging of seek bisections.
+//#define SEEK_LOGGING
+
+#ifdef PR_LOGGING
+extern PRLogModuleInfo* gBuiltinDecoderLog;
+#define LOG(type, msg) PR_LOG(gBuiltinDecoderLog, type, msg)
+#else
+#define LOG(type, msg)
+#endif
+
+static const int MAX_CHANNELS = 4;
+// Let the demuxer work in pull mode for short files
+static const int SHORT_FILE_SIZE = 1024 * 1024;
+// The default resource->Read() size when working in push mode
+static const int DEFAULT_SOURCE_READ_SIZE = 50 * 1024;
+
+typedef enum {
+  GST_PLAY_FLAG_VIDEO         = (1 << 0),
+  GST_PLAY_FLAG_AUDIO         = (1 << 1),
+  GST_PLAY_FLAG_TEXT          = (1 << 2),
+  GST_PLAY_FLAG_VIS           = (1 << 3),
+  GST_PLAY_FLAG_SOFT_VOLUME   = (1 << 4),
+  GST_PLAY_FLAG_NATIVE_AUDIO  = (1 << 5),
+  GST_PLAY_FLAG_NATIVE_VIDEO  = (1 << 6),
+  GST_PLAY_FLAG_DOWNLOAD      = (1 << 7),
+  GST_PLAY_FLAG_BUFFERING     = (1 << 8),
+  GST_PLAY_FLAG_DEINTERLACE   = (1 << 9),
+  GST_PLAY_FLAG_SOFT_COLORBALANCE = (1 << 10)
+} PlayFlags;
+
+nsGStreamerReader::nsGStreamerReader(nsBuiltinDecoder* aDecoder)
+  : nsBuiltinDecoderReader(aDecoder),
+  mPlayBin(NULL),
+  mBus(NULL),
+  mSource(NULL),
+  mVideoSink(NULL),
+  mVideoAppSink(NULL),
+  mAudioSink(NULL),
+  mAudioAppSink(NULL),
+  mFormat(GST_VIDEO_FORMAT_UNKNOWN),
+  mVideoSinkBufferCount(0),
+  mAudioSinkBufferCount(0),
+  mGstThreadsMonitor("media.gst.threads"),
+  mReachedEos(false),
+  mByteOffset(0),
+  mLastReportedByteOffset(0),
+  fpsNum(0),
+  fpsDen(0)
+{
+  MOZ_COUNT_CTOR(nsGStreamerReader);
+
+  mSrcCallbacks.need_data = nsGStreamerReader::NeedDataCb;
+  mSrcCallbacks.enough_data = nsGStreamerReader::EnoughDataCb;
+  mSrcCallbacks.seek_data = nsGStreamerReader::SeekDataCb;
+
+  mSinkCallbacks.eos = nsGStreamerReader::EosCb;
+  mSinkCallbacks.new_preroll = nsGStreamerReader::NewPrerollCb;
+  mSinkCallbacks.new_buffer = nsGStreamerReader::NewBufferCb;
+  mSinkCallbacks.new_buffer_list = NULL;
+
+  gst_segment_init(&mVideoSegment, GST_FORMAT_UNDEFINED);
+  gst_segment_init(&mAudioSegment, GST_FORMAT_UNDEFINED);
+}
+
+nsGStreamerReader::~nsGStreamerReader()
+{
+  MOZ_COUNT_DTOR(nsGStreamerReader);
+  ResetDecode();
+
+  if (mPlayBin) {
+    gst_app_src_end_of_stream(mSource);
+    gst_element_set_state(mPlayBin, GST_STATE_NULL);
+    gst_object_unref(mPlayBin);
+    mPlayBin = NULL;
+    mVideoSink = NULL;
+    mVideoAppSink = NULL;
+    mAudioSink = NULL;
+    mAudioAppSink = NULL;
+    gst_object_unref(mBus);
+    mBus = NULL;
+  }
+}
+
+nsresult nsGStreamerReader::Init(nsBuiltinDecoderReader* aCloneDonor)
+{
+  GError *error = NULL;
+  if (!gst_init_check(0, 0, &error)) {
+    LOG(PR_LOG_ERROR, ("gst initialization failed: %s", error->message));
+    g_error_free(error);
+    return NS_ERROR_FAILURE;
+  }
+
+  mPlayBin = gst_element_factory_make("playbin2", NULL);
+  if (mPlayBin == NULL) {
+    LOG(PR_LOG_ERROR, ("couldn't create playbin2"));
+    return NS_ERROR_FAILURE;
+  }
+  g_object_set(mPlayBin, "buffer-size", 0, NULL);
+  mBus = gst_pipeline_get_bus(GST_PIPELINE(mPlayBin));
+
+  mVideoSink = gst_parse_bin_from_description("capsfilter name=filter ! "
+      "appsink name=videosink sync=true max-buffers=1 "
+      "caps=video/x-raw-yuv,format=(fourcc)I420"
+      , TRUE, NULL);
+  mVideoAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mVideoSink),
+        "videosink"));
+  gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks,
+      (gpointer) this, NULL);
+  GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink");
+  gst_pad_add_event_probe(sinkpad,
+      G_CALLBACK(&nsGStreamerReader::EventProbeCb), this);
+  gst_object_unref(sinkpad);
+
+  mAudioSink = gst_parse_bin_from_description("capsfilter name=filter ! "
+        "appsink name=audiosink sync=true caps=audio/x-raw-float,"
+        "channels={1,2},rate=44100,width=32,endianness=1234", TRUE, NULL);
+  mAudioAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mAudioSink),
+        "audiosink"));
+  gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks,
+      (gpointer) this, NULL);
+  sinkpad = gst_element_get_pad(GST_ELEMENT(mAudioAppSink), "sink");
+  gst_pad_add_event_probe(sinkpad,
+      G_CALLBACK(&nsGStreamerReader::EventProbeCb), this);
+  gst_object_unref(sinkpad);
+
+  g_object_set(mPlayBin, "uri", "appsrc://",
+      "video-sink", mVideoSink,
+      "audio-sink", mAudioSink,
+      NULL);
+
+  g_object_connect(mPlayBin, "signal::source-setup",
+      nsGStreamerReader::PlayBinSourceSetupCb, this, NULL);
+
+  return NS_OK;
+}
+
+void nsGStreamerReader::PlayBinSourceSetupCb(GstElement *aPlayBin,
+                                             GstElement *aSource,
+                                             gpointer aUserData)
+{
+  nsGStreamerReader *reader = reinterpret_cast<nsGStreamerReader*>(aUserData);
+  reader->PlayBinSourceSetup(GST_APP_SRC(aSource));
+}
+
+void nsGStreamerReader::PlayBinSourceSetup(GstAppSrc *aSource)
+{
+  mSource = GST_APP_SRC(aSource);
+  gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, NULL);
+  MediaResource* resource = mDecoder->GetResource();
+  PRInt64 len = resource->GetLength();
+  gst_app_src_set_size(mSource, len);
+  if (resource->IsDataCachedToEndOfResource(0) ||
+      (len != -1 && len <= SHORT_FILE_SIZE)) {
+    /* let the demuxer work in pull mode for local files (or very short files)
+     * so that we get optimal seeking accuracy/performance
+     */
+    LOG(PR_LOG_ERROR, ("configuring random access"));
+    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_RANDOM_ACCESS);
+  } else {
+    /* make the demuxer work in push mode so that seeking is kept to a minimum
+     */
+    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_SEEKABLE);
+  }
+}
+
+nsresult nsGStreamerReader::ReadMetadata(nsVideoInfo* aInfo)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+  nsresult ret = NS_OK;
+
+  /* We do 3 attempts here: decoding audio and video, decoding video only,
+   * decoding audio only. This allows us to play streams that have one broken
+   * stream but that are otherwise decodeable.
+   */
+  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
+    ~GST_PLAY_FLAG_AUDIO, ~GST_PLAY_FLAG_VIDEO};
+  guint default_flags, current_flags;
+  g_object_get(mPlayBin, "flags", &default_flags, NULL);
+
+  GstMessage *message = NULL;
+  for (int i=0; i < G_N_ELEMENTS(flags); i++) {
+    current_flags = default_flags & flags[i];
+    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, NULL);
+
+    /* reset filter caps to ANY */
+    GstCaps *caps = gst_caps_new_any();
+    GstElement *filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
+    g_object_set(filter, "caps", caps, NULL);
+    gst_object_unref(filter);
+
+    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
+    g_object_set(filter, "caps", caps, NULL);
+    gst_object_unref(filter);
+    gst_caps_unref(caps);
+    filter = NULL;
+
+    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
+      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
+    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
+      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
+
+    if (filter) {
+      /* Little trick: set the target caps to "skip" so that playbin2 fails to
+       * find a decoder for the stream we want to skip.
+       */
+      GstCaps *filterCaps = gst_caps_new_simple ("skip", NULL);
+      g_object_set(filter, "caps", filterCaps, NULL);
+      gst_caps_unref(filterCaps);
+      gst_object_unref(filter);
+    }
+
+    /* start the pipeline */
+    gst_element_set_state(mPlayBin, GST_STATE_PAUSED);
+
+    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
+     * prerolled and ready to play. Also watch for errors.
+     */
+    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
+       (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR));
+    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
+      GError *error;
+      gchar *debug;
+
+      gst_message_parse_error(message, &error, &debug);
+      LOG(PR_LOG_ERROR, ("read metadata error: %s: %s", error->message,
+            debug));
+      g_error_free(error);
+      g_free(debug);
+      gst_element_set_state(mPlayBin, GST_STATE_NULL);
+      gst_message_unref(message);
+      ret = NS_ERROR_FAILURE;
+    } else {
+      gst_message_unref(message);
+      ret = NS_OK;
+      break;
+    }
+  }
+
+  if (NS_FAILED(ret))
+    /* we couldn't get this to play */
+    return ret;
+
+  /* FIXME: workaround for a bug in matroskademux. This seek makes matroskademux
+   * parse the index */
+  if (gst_element_seek_simple(mPlayBin, GST_FORMAT_TIME,
+        GST_SEEK_FLAG_FLUSH, 0)) {
+    /* after a seek we need to wait again for ASYNC_DONE */
+    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
+       (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR));
+    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
+      gst_element_set_state(mPlayBin, GST_STATE_NULL);
+      gst_message_unref(message);
+      return NS_ERROR_FAILURE;
+    }
+  }
+
+  /* report the duration */
+  gint64 duration;
+  GstFormat format = GST_FORMAT_TIME;
+  if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
+      &format, &duration) && format == GST_FORMAT_TIME) {
+    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+    LOG(PR_LOG_DEBUG, ("returning duration %"GST_TIME_FORMAT,
+          GST_TIME_ARGS (duration)));
+    duration = GST_TIME_AS_USECONDS (duration);
+    mDecoder->GetStateMachine()->SetDuration(duration);
+  }
+
+  int n_video = 0, n_audio = 0;
+  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, NULL);
+  mInfo.mHasVideo = n_video != 0;
+  mInfo.mHasAudio = n_audio != 0;
+
+  *aInfo = mInfo;
+
+  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
+   * the appsinks */
+  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);
+
+  return NS_OK;
+}
+
+nsresult nsGStreamerReader::ResetDecode()
+{
+  nsresult res = NS_OK;
+
+  if (NS_FAILED(nsBuiltinDecoderReader::ResetDecode())) {
+    res = NS_ERROR_FAILURE;
+  }
+
+  mVideoQueue.Reset();
+  mAudioQueue.Reset();
+
+  mVideoSinkBufferCount = 0;
+  mAudioSinkBufferCount = 0;
+  mReachedEos = false;
+  mLastReportedByteOffset = 0;
+  mByteOffset = 0;
+
+  return res;
+}
+
+void nsGStreamerReader::NotifyBytesConsumed()
+{
+  NS_ASSERTION(mByteOffset >= mLastReportedByteOffset,
+      "current byte offset less than prev offset");
+  mDecoder->NotifyBytesConsumed(mByteOffset - mLastReportedByteOffset);
+  mLastReportedByteOffset = mByteOffset;
+}
+
+bool nsGStreamerReader::WaitForDecodedData(int *aCounter)
+{
+  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+
+  /* Report consumed bytes from here as we can't do it from gst threads */
+  NotifyBytesConsumed();
+  while(*aCounter == 0) {
+    if (mReachedEos) {
+      return false;
+    }
+    mon.Wait();
+    NotifyBytesConsumed();
+  }
+  (*aCounter)--;
+
+  return true;
+}
+
+bool nsGStreamerReader::DecodeAudioData()
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+
+  if (!WaitForDecodedData(&mAudioSinkBufferCount)) {
+    mAudioQueue.Finish();
+    return false;
+  }
+
+  GstBuffer *buffer = gst_app_sink_pull_buffer(mAudioAppSink);
+  PRInt64 timestamp = GST_BUFFER_TIMESTAMP(buffer);
+  timestamp = gst_segment_to_stream_time(&mAudioSegment,
+      GST_FORMAT_TIME, timestamp);
+  timestamp = GST_TIME_AS_USECONDS(timestamp);
+  PRInt64 duration = 0;
+  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
+    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
+
+  PRInt64 offset = GST_BUFFER_OFFSET(buffer);
+  unsigned int size = GST_BUFFER_SIZE(buffer);
+  PRInt32 frames = (size / sizeof(AudioDataValue)) / mInfo.mAudioChannels;
+  ssize_t outSize = static_cast<size_t>(size / sizeof(AudioDataValue));
+  nsAutoArrayPtr<AudioDataValue> data(new AudioDataValue[outSize]);
+  memcpy(data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
+  AudioData *audio = new AudioData(offset, timestamp, duration,
+      frames, data.forget(), mInfo.mAudioChannels);
+
+  mAudioQueue.Push(audio);
+  gst_buffer_unref(buffer);
+
+  if (mAudioQueue.GetSize() < 2) {
+    nsCOMPtr<nsIRunnable> event =
+      NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::NextFrameAvailable);
+    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
+  }
+
+  return true;
+}
+
+bool nsGStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
+                                         PRInt64 aTimeThreshold)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+
+  GstBuffer *buffer = NULL;
+  PRInt64 timestamp, nextTimestamp;
+  while (true)
+  {
+    if (!WaitForDecodedData(&mVideoSinkBufferCount)) {
+      mVideoQueue.Finish();
+      break;
+    }
+    mDecoder->GetFrameStatistics().NotifyDecodedFrames(0, 1);
+
+    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
+    bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT);
+    if ((aKeyFrameSkip && !isKeyframe)) {
+      gst_buffer_unref(buffer);
+      buffer = NULL;
+      continue;
+    }
+
+    timestamp = GST_BUFFER_TIMESTAMP(buffer);
+    {
+      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+      timestamp = gst_segment_to_stream_time(&mVideoSegment,
+          GST_FORMAT_TIME, timestamp);
+    }
+    NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
+        "frame has invalid timestamp");
+    timestamp = nextTimestamp = GST_TIME_AS_USECONDS(timestamp);
+    if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
+      nextTimestamp += GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
+    else if (fpsNum && fpsDen)
+      /* add 1-frame duration */
+      nextTimestamp += gst_util_uint64_scale(GST_USECOND, fpsNum, fpsDen);
+
+    if (timestamp < aTimeThreshold) {
+      LOG(PR_LOG_DEBUG, ("skipping frame %"GST_TIME_FORMAT
+            " threshold %"GST_TIME_FORMAT,
+            GST_TIME_ARGS(timestamp), GST_TIME_ARGS(aTimeThreshold)));
+      gst_buffer_unref(buffer);
+      buffer = NULL;
+      continue;
+    }
+
+    break;
+  }
+
+  if (buffer == NULL)
+    /* no more frames */
+    return false;
+
+  guint8 *data = GST_BUFFER_DATA(buffer);
+
+  int width = mPicture.width;
+  int height = mPicture.height;
+  GstVideoFormat format = mFormat;
+
+  VideoData::YCbCrBuffer b;
+  for(int i = 0; i < 3; i++) {
+    b.mPlanes[i].mData = data + gst_video_format_get_component_offset(format, i,
+        width, height);
+    b.mPlanes[i].mStride = gst_video_format_get_row_stride(format, i, width);
+    b.mPlanes[i].mHeight = gst_video_format_get_component_height(format,
+        i, height);
+    b.mPlanes[i].mWidth = gst_video_format_get_component_width(format,
+        i, width);
+  }
+
+  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer,
+      GST_BUFFER_FLAG_DELTA_UNIT);
+  /* XXX ? */
+  PRInt64 offset = 0;
+  VideoData *video = VideoData::Create(mInfo,
+                                       mDecoder->GetImageContainer(),
+                                       offset,
+                                       timestamp,
+                                       nextTimestamp,
+                                       b,
+                                       isKeyframe,
+                                       -1,
+                                       mPicture);
+  mVideoQueue.Push(video);
+  gst_buffer_unref(buffer);
+
+  if (mVideoQueue.GetSize() < 2) {
+    nsCOMPtr<nsIRunnable> event =
+      NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::NextFrameAvailable);
+    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
+  }
+
+  return true;
+}
+
+nsresult nsGStreamerReader::Seek(PRInt64 aTarget,
+                                 PRInt64 aStartTime,
+                                 PRInt64 aEndTime,
+                                 PRInt64 aCurrentTime)
+{
+  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
+
+  gint64 seekPos = aTarget * GST_USECOND;
+  LOG(PR_LOG_DEBUG, ("%p About to seek to %"GST_TIME_FORMAT,
+        mDecoder, GST_TIME_ARGS(seekPos)));
+
+  if (!gst_element_seek_simple(mPlayBin, GST_FORMAT_TIME,
+    static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE), seekPos)) {
+    LOG(PR_LOG_ERROR, ("seek failed"));
+    return NS_ERROR_FAILURE;
+  }
+  LOG(PR_LOG_DEBUG, ("seek succeeded"));
+
+  return DecodeToTarget(aTarget);
+}
+
+nsresult nsGStreamerReader::GetBuffered(nsTimeRanges* aBuffered,
+                                        PRInt64 aStartTime)
+{
+  GstFormat format = GST_FORMAT_TIME;
+  MediaResource* resource = mDecoder->GetResource();
+  gint64 resourceLength = resource->GetLength();
+  nsTArray<MediaByteRange> ranges;
+  resource->GetCachedRanges(ranges);
+
+  if (mDecoder->OnStateMachineThread())
+    /* Report the position from here while buffering as we can't report it from
+     * the gstreamer threads that are actually reading from the resource
+     */
+    NotifyBytesConsumed();
+
+  if (resource->IsDataCachedToEndOfResource(0)) {
+    /* fast path for local or completely cached files */
+    gint64 duration = 0;
+    GstFormat format = GST_FORMAT_TIME;
+
+    duration = QueryDuration();
+    double end = (double) duration / GST_MSECOND;
+    LOG(PR_LOG_DEBUG, ("complete range [0, %f] for [0, %li]",
+          end, resourceLength));
+    aBuffered->Add(0, end);
+    return NS_OK;
+  }
+
+  for(PRUint32 index = 0; index < ranges.Length(); index++) {
+    PRInt64 startOffset = ranges[index].mStart;
+    PRInt64 endOffset = ranges[index].mEnd;
+    gint64 startTime, endTime;
+
+    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
+      startOffset, &format, &startTime) || format != GST_FORMAT_TIME)
+      continue;
+    if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
+      endOffset, &format, &endTime) || format != GST_FORMAT_TIME)
+      continue;
+
+    double start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND;
+    double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND;
+    LOG(PR_LOG_DEBUG, ("adding range [%f, %f] for [%li %li] size %li",
+          start, end, startOffset, endOffset, resourceLength));
+    aBuffered->Add(start, end);
+  }
+
+  return NS_OK;
+}
+
+void nsGStreamerReader::ReadAndPushData(guint aLength) /* read up to aLength bytes from the resource and push them into appsrc as one buffer */
+{
+  MediaResource* resource = mDecoder->GetResource();
+  NS_ASSERTION(resource, "Decoder has no media resource");
+  nsresult rv = NS_OK;
+
+  GstBuffer *buffer = gst_buffer_new_and_alloc(aLength);
+  guint8 *data = GST_BUFFER_DATA(buffer);
+  PRUint32 size = 0, bytesRead = 0;
+  while(bytesRead < aLength) {
+    rv = resource->Read(reinterpret_cast<char*>(data + bytesRead),
+        aLength - bytesRead, &size);
+    if (NS_FAILED(rv) || size == 0) /* size == 0 presumably means end of stream -- TODO confirm Read() contract */
+      break;
+
+    bytesRead += size;
+  }
+
+  GST_BUFFER_SIZE(buffer) = bytesRead; /* shrink the buffer to what was actually read */
+  mByteOffset += bytesRead;
+
+  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer)); /* push_buffer takes ownership of the ref we pass; keep ours to inspect the buffer below */
+  if (ret != GST_FLOW_OK)
+    LOG(PR_LOG_ERROR, ("ReadAndPushData push ret %s", gst_flow_get_name(ret)));
+
+  if (GST_BUFFER_SIZE (buffer) < aLength)
+    /* If we read less than what we wanted, we reached the end */
+    gst_app_src_end_of_stream(mSource);
+
+  gst_buffer_unref(buffer);
+}
+
+PRInt64 nsGStreamerReader::QueryDuration() /* best-known duration in microseconds: pipeline duration, or mDecoder->mDuration when larger */
+{
+  gint64 duration = 0;
+  GstFormat format = GST_FORMAT_TIME;
+
+  if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
+      &format, &duration)) {
+    if (format == GST_FORMAT_TIME) {
+      LOG(PR_LOG_DEBUG, ("pipeline duration %"GST_TIME_FORMAT,
+            GST_TIME_ARGS (duration)));
+      duration = GST_TIME_AS_USECONDS (duration); /* convert nanoseconds to microseconds */
+    }
+  } /* NOTE(review): if the query succeeds in a non-TIME format the raw value is returned unconverted -- confirm intended */
+
+  if (mDecoder->mDuration != -1 &&
+      mDecoder->mDuration > duration) {
+    /* We decoded more than the reported duration (which could be estimated) */
+    LOG(PR_LOG_DEBUG, ("mDuration > duration"));
+    duration = mDecoder->mDuration;
+  }
+
+  return duration;
+}
+
+void nsGStreamerReader::NeedDataCb(GstAppSrc *aSrc, /* appsrc need-data callback: trampoline to the instance method */
+                                   guint aLength,
+                                   gpointer aUserData)
+{
+  nsGStreamerReader *reader = (nsGStreamerReader *) aUserData;
+  reader->NeedData(aSrc, aLength);
+}
+
+void nsGStreamerReader::NeedData(GstAppSrc *aSrc, guint aLength)
+{
+  if (aLength == -1) /* -1 converts to G_MAXUINT, appsrc's "any amount" hint; fall back to a fixed read size */
+    aLength = DEFAULT_SOURCE_READ_SIZE;
+  ReadAndPushData(aLength);
+}
+
+void nsGStreamerReader::EnoughDataCb(GstAppSrc *aSrc, gpointer aUserData) /* appsrc enough-data callback: trampoline to the instance method */
+{
+  nsGStreamerReader *reader = (nsGStreamerReader *) aUserData;
+  reader->EnoughData(aSrc);
+}
+
+void nsGStreamerReader::EnoughData(GstAppSrc *aSrc)
+{ /* intentionally empty: data is only pushed on demand from NeedData, so there is nothing to throttle */
+}
+
+gboolean nsGStreamerReader::SeekDataCb(GstAppSrc *aSrc, /* appsrc seek-data callback: trampoline to the instance method */
+                                       guint64 aOffset,
+                                       gpointer aUserData)
+{
+  nsGStreamerReader *reader = (nsGStreamerReader *) aUserData;
+  return reader->SeekData(aSrc, aOffset);
+}
+
+gboolean nsGStreamerReader::SeekData(GstAppSrc *aSrc, guint64 aOffset) /* seek the resource to aOffset; returns TRUE on success */
+{
+  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+  MediaResource* resource = mDecoder->GetResource();
+
+  if (gst_app_src_get_size(mSource) == -1)
+    /* It's possible that we didn't know the length when we initialized mSource
+     * but maybe we do now
+     */
+    gst_app_src_set_size(mSource, resource->GetLength());
+
+  nsresult rv = NS_ERROR_FAILURE;
+  if (aOffset < resource->GetLength()) /* NOTE(review): GetLength() == -1 (unknown) converts to a huge guint64, so the seek is always attempted -- confirm intended */
+    rv = resource->Seek(SEEK_SET, aOffset);
+
+  if (NS_SUCCEEDED(rv))
+    mByteOffset = mLastReportedByteOffset = aOffset;
+  else
+    LOG(PR_LOG_ERROR, ("seek at %" G_GUINT64_FORMAT " failed", aOffset)); /* G_GUINT64_FORMAT: %lu is wrong for guint64 on 32-bit targets */
+
+  return NS_SUCCEEDED(rv);
+}
+
+gboolean nsGStreamerReader::EventProbeCb(GstPad *aPad, /* pad probe trampoline installed on the sink pads */
+                                         GstEvent *aEvent,
+                                         gpointer aUserData)
+{
+  nsGStreamerReader *reader = (nsGStreamerReader *) aUserData;
+  return reader->EventProbe(aPad, aEvent);
+}
+
+gboolean nsGStreamerReader::EventProbe(GstPad *aPad, GstEvent *aEvent)
+{
+  GstElement *parent = GST_ELEMENT(gst_pad_get_parent(aPad)); /* gst_pad_get_parent returns a ref; released before returning */
+  switch(GST_EVENT_TYPE(aEvent)) {
+    case GST_EVENT_NEWSEGMENT:
+    {
+      gboolean update;
+      gdouble rate;
+      GstFormat format;
+      gint64 start, stop, position;
+      GstSegment *segment;
+
+      /* Store the segments so we can convert timestamps to stream time, which
+       * is what the upper layers sync on.
+       */
+      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+      gst_event_parse_new_segment(aEvent, &update, &rate, &format,
+          &start, &stop, &position);
+      if (parent == GST_ELEMENT(mVideoAppSink)) /* pick the segment matching the sink this pad belongs to */
+        segment = &mVideoSegment;
+      else
+        segment = &mAudioSegment;
+      gst_segment_set_newsegment(segment, update, rate, format,
+          start, stop, position);
+      break;
+    }
+    case GST_EVENT_FLUSH_STOP:
+      /* Reset on seeks */
+      ResetDecode(); /* NOTE(review): runs on a gstreamer streaming thread -- confirm ResetDecode is safe off the decode thread */
+      break;
+    default:
+      break;
+  }
+  gst_object_unref(parent);
+
+  return TRUE; /* TRUE keeps the event flowing downstream */
+}
+
+GstFlowReturn nsGStreamerReader::NewPrerollCb(GstAppSink *aSink, /* appsink new-preroll callback: dispatch on which sink prerolled */
+                                              gpointer aUserData)
+{
+  nsGStreamerReader *reader = (nsGStreamerReader *) aUserData;
+
+  if (aSink == reader->mVideoAppSink)
+    reader->VideoPreroll();
+  else
+    reader->AudioPreroll();
+  return GST_FLOW_OK;
+}
+
+void nsGStreamerReader::AudioPreroll()
+{
+  /* The first audio buffer has reached the audio sink. Get rate and channels */
+  LOG(PR_LOG_DEBUG, ("Audio preroll"));
+  GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mAudioAppSink), "sink");
+  GstCaps *caps = gst_pad_get_negotiated_caps(sinkpad); /* returns a new ref, released below */
+  GstStructure *s = gst_caps_get_structure(caps, 0);
+  mInfo.mAudioRate = mInfo.mAudioChannels = 0;
+  gst_structure_get_int(s, "rate", (gint *) &mInfo.mAudioRate);
+  gst_structure_get_int(s, "channels", (gint *) &mInfo.mAudioChannels);
+  NS_ASSERTION(mInfo.mAudioRate != 0, "audio rate is zero"); /* plain string message, consistent with the assertion below */
+  NS_ASSERTION(mInfo.mAudioChannels != 0, "audio channels is zero");
+  NS_ASSERTION(mInfo.mAudioChannels > 0 && mInfo.mAudioChannels <= MAX_CHANNELS,
+      "invalid audio channels number");
+  mInfo.mHasAudio = true;
+  gst_caps_unref(caps);
+  gst_object_unref(sinkpad);
+}
+
+void nsGStreamerReader::VideoPreroll()
+{
+  /* The first video buffer has reached the video sink. Get width and height */
+  LOG(PR_LOG_DEBUG, ("Video preroll"));
+  GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink");
+  GstCaps *caps = gst_pad_get_negotiated_caps(sinkpad); /* returns a new ref, released below */
+  gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height); /* NOTE(review): return value unchecked -- mFormat/mPicture stay unset if parsing fails */
+  GstStructure *structure = gst_caps_get_structure(caps, 0);
+  gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen); /* NOTE(review): unchecked; fpsNum/fpsDen keep previous values if caps lack a framerate -- confirm they are initialized elsewhere */
+  NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");
+  mInfo.mDisplay = nsIntSize(mPicture.width, mPicture.height);
+  mInfo.mHasVideo = true;
+  gst_caps_unref(caps);
+  gst_object_unref(sinkpad);
+}
+
+GstFlowReturn nsGStreamerReader::NewBufferCb(GstAppSink *aSink, /* appsink new-buffer callback: dispatch on which sink received data */
+                                             gpointer aUserData)
+{
+  nsGStreamerReader *reader = (nsGStreamerReader *) aUserData;
+
+  if (aSink == reader->mVideoAppSink)
+    reader->NewVideoBuffer();
+  else
+    reader->NewAudioBuffer();
+
+  return GST_FLOW_OK;
+}
+
+void nsGStreamerReader::NewVideoBuffer()
+{
+  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+  /* We have a new video buffer queued in the video sink. Increment the counter
+   * and notify the decode thread potentially blocked in DecodeVideoFrame
+   */
+  mDecoder->GetFrameStatistics().NotifyDecodedFrames(1, 0);
+  mVideoSinkBufferCount++; /* guarded by mGstThreadsMonitor */
+  mon.NotifyAll();
+}
+
+void nsGStreamerReader::NewAudioBuffer()
+{
+  ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+  /* We have a new audio buffer queued in the audio sink. Increment the counter
+   * and notify the decode thread potentially blocked in DecodeAudioData
+   */
+  mAudioSinkBufferCount++; /* guarded by mGstThreadsMonitor */
+  mon.NotifyAll();
+}
+
+void nsGStreamerReader::EosCb(GstAppSink *aSink, gpointer aUserData) /* appsink eos callback: trampoline to the instance method */
+{
+  nsGStreamerReader *reader = (nsGStreamerReader *) aUserData;
+  reader->Eos(aSink);
+}
+
+void nsGStreamerReader::Eos(GstAppSink *aSink)
+{
+  /* We reached the end of the stream */
+  {
+    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
+    /* Potentially unblock DecodeVideoFrame and DecodeAudioData */
+    mReachedEos = true;
+    mon.NotifyAll();
+  }
+
+  { /* separate scope: take the decoder monitor only after releasing the gst threads monitor */
+    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+    /* Potentially unblock the decode thread in ::DecodeLoop */
+    mVideoQueue.Finish();
+    mAudioQueue.Finish();
+    mon.NotifyAll();
+  }
+}
new file mode 100644
--- /dev/null
+++ b/content/media/gstreamer/nsGStreamerReader.h
@@ -0,0 +1,134 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(nsGStreamerReader_h_)
+#define nsGStreamerReader_h_
+
+#include <gst/gst.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/app/gstappsink.h>
+#include <gst/video/video.h>
+#include "nsBuiltinDecoderReader.h"
+
+using namespace mozilla;
+
+class nsMediaDecoder;
+class nsTimeRanges;
+
+class nsGStreamerReader : public nsBuiltinDecoderReader
+{
+public:
+  nsGStreamerReader(nsBuiltinDecoder* aDecoder);
+  virtual ~nsGStreamerReader();
+
+  virtual nsresult Init(nsBuiltinDecoderReader* aCloneDonor);
+  virtual nsresult ResetDecode();
+  virtual bool DecodeAudioData();
+  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
+                                PRInt64 aTimeThreshold);
+  virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
+  virtual nsresult Seek(PRInt64 aTime,
+                        PRInt64 aStartTime,
+                        PRInt64 aEndTime,
+                        PRInt64 aCurrentTime);
+  virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
+
+  virtual bool HasAudio() {
+    return mInfo.mHasAudio;
+  }
+
+  virtual bool HasVideo() {
+    return mInfo.mHasVideo;
+  }
+
+private:
+
+  void ReadAndPushData(guint aLength);
+  bool WaitForDecodedData(int *counter);
+  void NotifyBytesConsumed();
+  PRInt64 QueryDuration();
+
+  /* Gst callbacks */
+
+  /* Called on the source-setup signal emitted by playbin. Used to
+   * configure appsrc .
+   */
+  static void PlayBinSourceSetupCb(GstElement *aPlayBin,
+                                   GstElement *aSource,
+                                   gpointer aUserData);
+  void PlayBinSourceSetup(GstAppSrc *aSource);
+
+  /* Called from appsrc when we need to read more data from the resource */
+  static void NeedDataCb(GstAppSrc *aSrc, guint aLength, gpointer aUserData);
+  void NeedData(GstAppSrc *aSrc, guint aLength);
+
+  /* Called when appsrc has enough data and we can stop reading */
+  static void EnoughDataCb(GstAppSrc *aSrc, gpointer aUserData);
+  void EnoughData(GstAppSrc *aSrc);
+
+  /* Called when a seek is issued on the pipeline */
+  static gboolean SeekDataCb(GstAppSrc *aSrc,
+                             guint64 aOffset,
+                             gpointer aUserData);
+  gboolean SeekData(GstAppSrc *aSrc, guint64 aOffset);
+
+  /* Called when events reach the sinks. See inline comments */
+  static gboolean EventProbeCb(GstPad *aPad, GstEvent *aEvent, gpointer aUserData);
+  gboolean EventProbe(GstPad *aPad, GstEvent *aEvent);
+
+  /* Called when the pipeline is prerolled, that is when at start or after a
+   * seek, the first audio and video buffers are queued in the sinks.
+   */
+  static GstFlowReturn NewPrerollCb(GstAppSink *aSink, gpointer aUserData);
+  void VideoPreroll();
+  void AudioPreroll();
+
+  /* Called when buffers reach the sinks */
+  static GstFlowReturn NewBufferCb(GstAppSink *aSink, gpointer aUserData);
+  void NewVideoBuffer();
+  void NewAudioBuffer();
+
+  /* Called at end of stream, when decoding has finished */
+  static void EosCb(GstAppSink *aSink, gpointer aUserData);
+  void Eos(GstAppSink *aSink);
+
+  GstElement *mPlayBin; /* playbin element driving decoding */
+  GstBus *mBus;
+  GstAppSrc *mSource; /* appsrc fed with data read from the MediaResource */
+  /* video sink bin */
+  GstElement *mVideoSink;
+  /* the actual video app sink */
+  GstAppSink *mVideoAppSink;
+  /* audio sink bin */
+  GstElement *mAudioSink;
+  /* the actual audio app sink */
+  GstAppSink *mAudioAppSink;
+  GstVideoFormat mFormat; /* negotiated video format, set in VideoPreroll */
+  nsIntRect mPicture; /* picture geometry parsed from the video caps in VideoPreroll */
+  int mVideoSinkBufferCount; /* buffers queued in the video appsink; guarded by mGstThreadsMonitor */
+  int mAudioSinkBufferCount; /* buffers queued in the audio appsink; guarded by mGstThreadsMonitor */
+  GstAppSrcCallbacks mSrcCallbacks; /* storage for the appsrc callbacks (static *Cb methods above) */
+  GstAppSinkCallbacks mSinkCallbacks; /* storage for the appsink callbacks (static *Cb methods above) */
+  /* monitor used to synchronize access to shared state between gstreamer
+   * threads and other gecko threads */
+  mozilla::ReentrantMonitor mGstThreadsMonitor;
+  /* video and audio segments we use to convert absolute timestamps to [0,
+   * stream_duration]. They're set when the pipeline is started or after a seek.
+   * Concurrent access guarded with mGstThreadsMonitor.
+   */
+  GstSegment mVideoSegment;
+  GstSegment mAudioSegment;
+  /* bool used to signal when gst has detected the end of stream and
+   * DecodeAudioData and DecodeVideoFrame should not expect any more data
+   */
+  bool mReachedEos;
+  /* offset we've reached reading from the source */
+  gint64 mByteOffset;
+  /* the last offset we reported with NotifyBytesConsumed */
+  gint64 mLastReportedByteOffset;
+  int fpsNum; /* video framerate numerator from caps; NOTE(review): lacks the m member prefix used elsewhere */
+  int fpsDen; /* video framerate denominator from caps */
+};
+
+#endif
--- a/content/media/nsBuiltinDecoderReader.h
+++ b/content/media/nsBuiltinDecoderReader.h
@@ -384,17 +384,17 @@ private:
 // this class can only be accessed on the decode thread.
 class nsBuiltinDecoderReader : public nsRunnable {
 public:
   typedef mozilla::ReentrantMonitor ReentrantMonitor;
   typedef mozilla::ReentrantMonitorAutoEnter ReentrantMonitorAutoEnter;
   typedef mozilla::VideoFrameContainer VideoFrameContainer;
 
   nsBuiltinDecoderReader(nsBuiltinDecoder* aDecoder);
-  ~nsBuiltinDecoderReader();
+  virtual ~nsBuiltinDecoderReader();
 
   // Initializes the reader, returns NS_OK on success, or NS_ERROR_FAILURE
   // on failure.
   virtual nsresult Init(nsBuiltinDecoderReader* aCloneDonor) = 0;
 
   // Resets all state related to decoding, emptying all buffers etc.
   virtual nsresult ResetDecode();
 
--- a/content/media/nsBuiltinDecoderStateMachine.cpp
+++ b/content/media/nsBuiltinDecoderStateMachine.cpp
@@ -453,16 +453,17 @@ nsBuiltinDecoderStateMachine::~nsBuiltin
   MOZ_COUNT_DTOR(nsBuiltinDecoderStateMachine);
   NS_ASSERTION(!StateMachineTracker::Instance().IsQueued(this),
     "Should not have a pending request for a new decode thread");
   NS_ASSERTION(!mRequestedNewDecodeThread,
     "Should not have (or flagged) a pending request for a new decode thread");
   if (mTimer)
     mTimer->Cancel();
   mTimer = nsnull;
+  mReader = nsnull;
  
   StateMachineTracker::Instance().CleanupGlobalStateMachine();
 }
 
 bool nsBuiltinDecoderStateMachine::HasFutureAudio() const {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   NS_ASSERTION(HasAudio(), "Should only call HasFutureAudio() when we have audio");
   // We've got audio ready to play if:
--- a/js/src/config/system-headers
+++ b/js/src/config/system-headers
@@ -1050,8 +1050,12 @@ sydneyaudio/sydney_audio.h
 vorbis/codec.h
 theora/theoradec.h
 tremor/ivorbiscodec.h
 ogg/ogg.h
 ogg/os_types.h
 nestegg/nestegg.h
 cubeb/cubeb.h
 #endif
+gst/gst.h
+gst/app/gstappsink.h
+gst/app/gstappsrc.h
+gst/video/video.h
--- a/layout/build/Makefile.in
+++ b/layout/build/Makefile.in
@@ -178,16 +178,22 @@ SHARED_LIBRARY_LIBS 	+= \
 endif
 
 ifdef MOZ_WAVE
 SHARED_LIBRARY_LIBS 	+= \
 	$(DEPTH)/content/media/wave/$(LIB_PREFIX)gkconwave_s.$(LIB_SUFFIX) \
 	$(NULL)
 endif
 
+ifdef MOZ_GSTREAMER
+SHARED_LIBRARY_LIBS 	+= \
+	$(DEPTH)/content/media/gstreamer/$(LIB_PREFIX)gkcongstreamer_s.$(LIB_SUFFIX) \
+	$(NULL)
+endif
+
 ifdef NS_PRINTING
 SHARED_LIBRARY_LIBS += \
 		../printing/$(LIB_PREFIX)gkprinting_s.$(LIB_SUFFIX) \
 		$(NULL)
 endif
 
 ifdef MOZ_XUL
 SHARED_LIBRARY_LIBS += \
@@ -218,16 +224,20 @@ SHARED_LIBRARY_LIBS += \
 
 ifdef ENABLE_EDITOR_API_LOG
 DEFINES += -DENABLE_EDITOR_API_LOG
 endif
 
 SHARED_LIBRARY_LIBS += \
 	$(DEPTH)/js/xpconnect/src/$(LIB_PREFIX)xpconnect_s.$(LIB_SUFFIX)
 
+ifdef MOZ_GSTREAMER
+EXTRA_DSO_LDOPTS += $(GSTREAMER_LIBS)
+endif
+
 include $(topsrcdir)/config/config.mk
 include $(topsrcdir)/ipc/chromium/chromium-config.mk
 
 include $(topsrcdir)/config/rules.mk
 
 LOCAL_INCLUDES	+= -I$(srcdir)/../base \
 		   -I$(srcdir)/../generic \
 		   -I$(srcdir)/../forms \
--- a/modules/libpref/src/init/all.js
+++ b/modules/libpref/src/init/all.js
@@ -188,16 +188,20 @@ pref("media.raw.enabled", true);
 pref("media.ogg.enabled", true);
 #endif
 #ifdef MOZ_WAVE
 pref("media.wave.enabled", true);
 #endif
 #ifdef MOZ_WEBM
 pref("media.webm.enabled", true);
 #endif
+#ifdef MOZ_GSTREAMER
+pref("media.h264.enabled", true);
+#endif
+
 
 // Whether to autostart a media element with an |autoplay| attribute
 pref("media.autoplay.enabled", true);
 
 // 0 = Off, 1 = Full, 2 = Tagged Images Only. 
 // See eCMSMode in gfx/thebes/gfxPlatform.h
 pref("gfx.color_management.mode", 2);
 pref("gfx.color_management.display_profile", "");
--- a/toolkit/library/Makefile.in
+++ b/toolkit/library/Makefile.in
@@ -550,16 +550,20 @@ endif # WINNT
 ifdef MOZ_JPROF
 EXTRA_DSO_LDOPTS += -ljprof
 endif
 
 ifdef MOZ_ENABLE_QT
 EXTRA_DSO_LDOPTS += $(MOZ_QT_LDFLAGS) $(XEXT_LIBS)
 endif
 
+ifdef MOZ_GSTREAMER
+EXTRA_DSO_LDOPTS += $(GSTREAMER_LIBS)
+endif
+
 include $(topsrcdir)/config/rules.mk
 
 export:: $(RDF_UTIL_SRC_CPPSRCS) $(INTL_UNICHARUTIL_UTIL_CPPSRCS)
 	$(INSTALL) $^ .
 
 # need widget/windows for resource.h (included from widget.rc)
 LOCAL_INCLUDES += \
   -I$(topsrcdir)/config \