Bug 659188 - Android and media parts [r=blassey] [r=cpearce]
authorFabrice Desré <fabrice@mozilla.com>
Mon, 26 Sep 2011 17:25:41 -0700
changeset 78958 5fc1c56499278628ada047f97c590754dad8f233
parent 78957 78b3f8faf4e3469ea4ba1c89f62cc421ca1b7352
child 78959 b620680b7f08ba31d449a9077c4b3eae9d6b4997
push id78
push userclegnitto@mozilla.com
push dateFri, 16 Dec 2011 17:32:24 +0000
treeherdermozilla-release@79d24e644fdd [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersblassey, cpearce
bugs659188
milestone9.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 659188 - Android and media parts [r=blassey] [r=cpearce]
configure.in
content/media/nsBuiltinDecoderStateMachine.cpp
content/media/nsBuiltinDecoderStateMachine.h
content/media/raw/Makefile.in
content/media/raw/nsRawDecoder.cpp
content/media/raw/nsRawReader.cpp
content/media/raw/nsRawReader.h
content/media/raw/nsRawStructs.h
embedding/android/AndroidManifest.xml.in
embedding/android/GeckoAppShell.java
mobile/app/mobile.js
mobile/confvars.sh
netwerk/protocol/device/AndroidCaptureProvider.cpp
netwerk/protocol/device/AndroidCaptureProvider.h
netwerk/protocol/device/CameraStreamImpl.cpp
netwerk/protocol/device/CameraStreamImpl.h
netwerk/protocol/device/Makefile.in
netwerk/protocol/device/nsDeviceCaptureProvider.h
netwerk/protocol/device/nsDeviceChannel.cpp
other-licenses/android/APKOpen.cpp
widget/src/android/AndroidBridge.cpp
widget/src/android/AndroidBridge.h
--- a/configure.in
+++ b/configure.in
@@ -4352,17 +4352,17 @@ MOZ_XTF=1
 MOZ_XUL=1
 MOZ_ZIPWRITER=1
 NS_PRINTING=1
 MOZ_PDF_PRINTING=
 MOZ_DISABLE_DOMCRYPTO=
 NSS_DISABLE_DBM=
 NECKO_WIFI=1
 NECKO_COOKIES=1
-NECKO_PROTOCOLS_DEFAULT="about data file ftp http res viewsource websocket wyciwyg"
+NECKO_PROTOCOLS_DEFAULT="about data file ftp http res viewsource websocket wyciwyg device"
 USE_ARM_KUSER=
 BUILD_CTYPES=1
 MOZ_USE_NATIVE_POPUP_WINDOWS=
 
 
 case "${target}" in
 *android*|*darwin*)
     ACCESSIBILITY=
--- a/content/media/nsBuiltinDecoderStateMachine.cpp
+++ b/content/media/nsBuiltinDecoderStateMachine.cpp
@@ -40,38 +40,32 @@
 #include "nsAudioStream.h"
 #include "nsTArray.h"
 #include "nsBuiltinDecoder.h"
 #include "nsBuiltinDecoderReader.h"
 #include "nsBuiltinDecoderStateMachine.h"
 #include "mozilla/mozalloc.h"
 #include "VideoUtils.h"
 #include "nsTimeRanges.h"
+#include "mozilla/Preferences.h"
 
 using namespace mozilla;
 using namespace mozilla::layers;
 
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* gBuiltinDecoderLog;
 #define LOG(type, msg) PR_LOG(gBuiltinDecoderLog, type, msg)
 #else
 #define LOG(type, msg)
 #endif
 
-// Wait this number of seconds when buffering, then leave and play
+// Wait this number of milliseconds when buffering, then leave and play
 // as best as we can if the required amount of data hasn't been
 // retrieved.
-static const PRUint32 BUFFERING_WAIT = 30;
-
-// The amount of data to retrieve during buffering is computed based
-// on the download rate. BUFFERING_MIN_RATE is the minimum download
-// rate to be used in that calculation to help avoid constant buffering
-// attempts at a time when the average download rate has not stabilised.
-#define BUFFERING_MIN_RATE 50000
-#define BUFFERING_RATE(x) ((x)< BUFFERING_MIN_RATE ? BUFFERING_MIN_RATE : (x))
+static const PRUint32 BUFFERING_WAIT = 30000;
 
 // If audio queue has less than this many usecs of decoded audio, we won't risk
 // trying to decode the video, we'll skip decoding video up to the next
 // keyframe. We may increase this value for an individual decoder if we
 // encounter video frames which take a long time to decode.
 static const PRUint32 LOW_AUDIO_USECS = 300000;
 
 // If more than this many usecs of decoded audio is queued, we'll hold off
@@ -189,17 +183,18 @@ public:
     mThread = nsnull;
     return NS_OK;
   }
 private:
   nsCOMPtr<nsIThread> mThread;
 };
 
 nsBuiltinDecoderStateMachine::nsBuiltinDecoderStateMachine(nsBuiltinDecoder* aDecoder,
-                                                           nsBuiltinDecoderReader* aReader) :
+                                                           nsBuiltinDecoderReader* aReader,
+                                                           PRPackedBool aRealTime) :
   mDecoder(aDecoder),
   mState(DECODER_STATE_DECODING_METADATA),
   mCbCrSize(0),
   mPlayDuration(0),
   mStartTime(-1),
   mEndTime(-1),
   mSeekTime(0),
   mFragmentEndTime(-1),
@@ -216,28 +211,36 @@ nsBuiltinDecoderStateMachine::nsBuiltinD
   mStopDecodeThread(PR_TRUE),
   mDecodeThreadIdle(PR_FALSE),
   mStopAudioThread(PR_TRUE),
   mQuickBuffering(PR_FALSE),
   mIsRunning(PR_FALSE),
   mRunAgain(PR_FALSE),
   mDispatchedRunEvent(PR_FALSE),
   mDecodeThreadWaiting(PR_FALSE),
-  mEventManager(aDecoder)
+  mEventManager(aDecoder),
+  mRealTime(aRealTime)
 {
   MOZ_COUNT_CTOR(nsBuiltinDecoderStateMachine);
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   if (gStateMachineCount == 0) {
     NS_ASSERTION(!gStateMachineThread, "Should have null state machine thread!");
     nsresult res = NS_NewThread(&gStateMachineThread,
                                 nsnull,
                                 MEDIA_THREAD_STACK_SIZE);
     NS_ABORT_IF_FALSE(NS_SUCCEEDED(res), "Can't create media state machine thread");
   }
   gStateMachineCount++;
+
+  // Only enable realtime mode when the "media.realtime_decoder.enabled" pref is true.
+  if (Preferences::GetBool("media.realtime_decoder.enabled", PR_FALSE) == PR_FALSE)
+    mRealTime = PR_FALSE;
+
+  mBufferingWait = mRealTime ? 0 : BUFFERING_WAIT;
+  mLowDataThresholdUsecs = mRealTime ? 0 : LOW_DATA_THRESHOLD_USECS;
 }
 
 nsBuiltinDecoderStateMachine::~nsBuiltinDecoderStateMachine()
 {
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
   MOZ_COUNT_DTOR(nsBuiltinDecoderStateMachine);
   if (mTimer)
     mTimer->Cancel();
@@ -327,22 +330,22 @@ void nsBuiltinDecoderStateMachine::Decod
   // If the video decode is falling behind the audio, we'll start dropping the
   // inter-frames up until the next keyframe which is at or before the current
   // playback position. skipToNextKeyframe is PR_TRUE if we're currently
   // skipping up to the next keyframe.
   PRBool skipToNextKeyframe = PR_FALSE;
 
   // Once we've decoded more than videoPumpThreshold video frames, we'll
   // no longer be considered to be "pumping video".
-  const unsigned videoPumpThreshold = AMPLE_VIDEO_FRAMES / 2;
+  const unsigned videoPumpThreshold = mRealTime ? 0 : AMPLE_VIDEO_FRAMES / 2;
 
   // After the audio decode fills with more than audioPumpThreshold usecs
   // of decoded audio, we'll start to check whether the audio or video decode
   // is falling behind.
-  const unsigned audioPumpThreshold = LOW_AUDIO_USECS * 2;
+  const unsigned audioPumpThreshold = mRealTime ? 0 : LOW_AUDIO_USECS * 2;
 
   // Our local low audio threshold. We may increase this if we're slow to
   // decode video frames, in order to reduce the chance of audio underruns.
   PRInt64 lowAudioThreshold = LOW_AUDIO_USECS;
 
   // Our local ample audio threshold. If we increase lowAudioThreshold, we'll
   // also increase this too appropriately (we don't want lowAudioThreshold to
   // be greater than ampleAudioThreshold, else we'd stop decoding!).
@@ -424,17 +427,17 @@ void nsBuiltinDecoderStateMachine::Decod
 
     // Audio decode.
     if (audioPlaying &&
         (GetDecodedAudioDuration() < ampleAudioThreshold || audioQueue.GetSize() == 0))
     {
       ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
       audioPlaying = mReader->DecodeAudioData();
     }
-    
+
     // Notify to ensure that the AudioLoop() is not waiting, in case it was
     // waiting for more audio to be decoded.
     mDecoder->GetReentrantMonitor().NotifyAll();
 
     // The ready state can change when we've decoded data, so update the
     // ready state, so that DOM events can fire.
     UpdateReadyState();
 
@@ -1141,17 +1144,17 @@ PRBool nsBuiltinDecoderStateMachine::Has
          (!HasAudio() &&
           HasVideo() &&
           !mReader->mVideoQueue.IsFinished() &&
           static_cast<PRUint32>(mReader->mVideoQueue.GetSize()) < LOW_VIDEO_FRAMES));
 }
 
 PRBool nsBuiltinDecoderStateMachine::HasLowUndecodedData() const
 {
-  return GetUndecodedData() < LOW_DATA_THRESHOLD_USECS;
+  return GetUndecodedData() < mLowDataThresholdUsecs;
 }
 
 PRInt64 nsBuiltinDecoderStateMachine::GetUndecodedData() const
 {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   NS_ASSERTION(mState > DECODER_STATE_DECODING_METADATA,
                "Must have loaded metadata for GetBuffered() to work");
   nsTimeRanges buffered;
@@ -1506,28 +1509,28 @@ nsresult nsBuiltinDecoderStateMachine::R
       NS_ASSERTION(!mBufferingStart.IsNull(), "Must know buffering start time.");
 
       // We will remain in the buffering state if we've not decoded enough
       // data to begin playback, or if we've not downloaded a reasonable
       // amount of data inside our buffering time.
       TimeDuration elapsed = now - mBufferingStart;
       PRBool isLiveStream = mDecoder->GetCurrentStream()->GetLength() == -1;
       if ((isLiveStream || !mDecoder->CanPlayThrough()) &&
-            elapsed < TimeDuration::FromSeconds(BUFFERING_WAIT) &&
+            elapsed < TimeDuration::FromSeconds(mBufferingWait) &&
             (mQuickBuffering ? HasLowDecodedData(QUICK_BUFFERING_LOW_DATA_USECS)
-                            : (GetUndecodedData() < BUFFERING_WAIT * USECS_PER_S)) &&
+                            : (GetUndecodedData() < mBufferingWait * USECS_PER_S / 1000)) &&
             !stream->IsDataCachedToEndOfStream(mDecoder->mDecoderPosition) &&
             !stream->IsSuspended())
       {
         LOG(PR_LOG_DEBUG,
             ("%p Buffering: %.3lfs/%ds, timeout in %.3lfs %s",
               mDecoder.get(),
               GetUndecodedData() / static_cast<double>(USECS_PER_S),
-              BUFFERING_WAIT,
-              BUFFERING_WAIT - elapsed.ToSeconds(),
+              mBufferingWait,
+              mBufferingWait - elapsed.ToSeconds(),
               (mQuickBuffering ? "(quick exit)" : "")));
         ScheduleStateMachine(USECS_PER_S);
         return NS_OK;
       } else {
         LOG(PR_LOG_DEBUG, ("%p Changed state from BUFFERING to DECODING", mDecoder.get()));
         LOG(PR_LOG_DEBUG, ("%p Buffered for %.3lfs",
                             mDecoder.get(),
                             (now - mBufferingStart).ToSeconds()));
@@ -1678,17 +1681,17 @@ void nsBuiltinDecoderStateMachine::Advan
 
   // Skip frames up to the frame at the playback position, and figure out
   // the time remaining until it's time to display the next frame.
   PRInt64 remainingTime = AUDIO_DURATION_USECS;
   NS_ASSERTION(clock_time >= mStartTime, "Should have positive clock time.");
   nsAutoPtr<VideoData> currentFrame;
   if (mReader->mVideoQueue.GetSize() > 0) {
     VideoData* frame = mReader->mVideoQueue.PeekFront();
-    while (clock_time >= frame->mTime) {
+    while (mRealTime || clock_time >= frame->mTime) {
       mVideoFrameEndTime = frame->mEndTime;
       currentFrame = frame;
       mReader->mVideoQueue.PopFront();
       // Notify the decode thread that the video queue's buffers may have
       // free'd up space for more frames.
       mDecoder->GetReentrantMonitor().NotifyAll();
       mDecoder->UpdatePlaybackOffset(frame->mOffset);
       if (mReader->mVideoQueue.GetSize() == 0)
@@ -1903,17 +1906,17 @@ PRBool nsBuiltinDecoderStateMachine::IsP
     (mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING);
 }
 
 nsresult nsBuiltinDecoderStateMachine::Run()
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
 
-  return CallRunStateMachine();
+  return CallRunStateMachine();
 }
 
 nsresult nsBuiltinDecoderStateMachine::CallRunStateMachine()
 {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
   // This will be set to PR_TRUE by ScheduleStateMachine() if it's called
   // while we're in RunStateMachine().
@@ -1946,19 +1949,19 @@ static void TimeoutExpired(nsITimer *aTi
 
 void nsBuiltinDecoderStateMachine::TimeoutExpired()
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   NS_ASSERTION(OnStateMachineThread(), "Must be on state machine thread");
   if (mIsRunning) {
     mRunAgain = PR_TRUE;
   } else if (!mDispatchedRunEvent) {
-    // We don't have an event dispatched to run the state machine, so we
-    // can just run it from here.
-    CallRunStateMachine();
+    // We don't have an event dispatched to run the state machine, so we
+    // can just run it from here.
+    CallRunStateMachine();
   }
   // Otherwise, an event has already been dispatched to run the state machine
   // as soon as possible. Nothing else needed to do, the state machine is
   // going to run anyway.
 }
 
 nsresult nsBuiltinDecoderStateMachine::ScheduleStateMachine() {
   return ScheduleStateMachine(0);
@@ -1984,16 +1987,18 @@ nsresult nsBuiltinDecoderStateMachine::S
     if (mTimer) {
       // We've been asked to schedule a timer to run before an existing timer.
       // Cancel the existing timer.
       mTimer->Cancel();
     }
   }
 
   PRUint32 ms = static_cast<PRUint32>((aUsecs / USECS_PER_MS) & 0xFFFFFFFF);
+  if (mRealTime && ms > 40)
+    ms = 40;
   if (ms == 0) {
     if (mIsRunning) {
       // We're currently running this state machine on the state machine
       // thread. Signal it to run again once it finishes its current cycle.
       mRunAgain = PR_TRUE;
       return NS_OK;
     } else if (!mDispatchedRunEvent) {
       // We're not currently running this state machine on the state machine
--- a/content/media/nsBuiltinDecoderStateMachine.h
+++ b/content/media/nsBuiltinDecoderStateMachine.h
@@ -132,17 +132,17 @@ hardware (via nsAudioStream and libsydne
 */
 class nsBuiltinDecoderStateMachine : public nsDecoderStateMachine
 {
 public:
   typedef mozilla::ReentrantMonitor ReentrantMonitor;
   typedef mozilla::TimeStamp TimeStamp;
   typedef mozilla::TimeDuration TimeDuration;
 
-  nsBuiltinDecoderStateMachine(nsBuiltinDecoder* aDecoder, nsBuiltinDecoderReader* aReader);
+  nsBuiltinDecoderStateMachine(nsBuiltinDecoder* aDecoder, nsBuiltinDecoderReader* aReader, PRPackedBool aRealTime = PR_FALSE);
   ~nsBuiltinDecoderStateMachine();
 
   // nsDecoderStateMachine interface
   virtual nsresult Init(nsDecoderStateMachine* aCloneDonor);
   State GetState()
   { 
     mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
     return mState; 
@@ -619,16 +619,22 @@ protected:
   // first is shutting down a thread, causing inconsistent state.
   PRPackedBool mDispatchedRunEvent;
 
   // PR_TRUE if the decode thread has gone filled its buffers and is now
   // waiting to be awakened before it continues decoding. Synchronized
   // by the decoder monitor.
   PRPackedBool mDecodeThreadWaiting;
 
+  // true if we are decoding a realtime stream, like a camera stream
+  PRPackedBool mRealTime;
+  
+  PRUint32 mBufferingWait;
+  PRInt64  mLowDataThresholdUsecs;
+
 private:
   // Manager for queuing and dispatching MozAudioAvailable events.  The
   // event manager is accessed from the state machine and audio threads,
   // and takes care of synchronizing access to its internal queue.
   nsAudioAvailableEventManager mEventManager;
 
   // Stores presentation info required for playback. The decoder monitor
   // must be held when accessing this.
--- a/content/media/raw/Makefile.in
+++ b/content/media/raw/Makefile.in
@@ -45,16 +45,17 @@ include $(DEPTH)/config/autoconf.mk
 
 MODULE		= content
 LIBRARY_NAME	= gkconraw_s
 LIBXUL_LIBRARY  = 1
 
 EXPORTS		+= \
 		nsRawDecoder.h \
                 nsRawReader.h \
+		nsRawStructs.h \
 		$(NULL)
 
 CPPSRCS		+= \
 		nsRawDecoder.cpp \
                 nsRawReader.cpp \
 		$(NULL)
 
 FORCE_STATIC_LIB = 1
--- a/content/media/raw/nsRawDecoder.cpp
+++ b/content/media/raw/nsRawDecoder.cpp
@@ -35,10 +35,10 @@
  * ***** END LICENSE BLOCK ***** */
 
 #include "nsBuiltinDecoderStateMachine.h"
 #include "nsRawReader.h"
 #include "nsRawDecoder.h"
 
 nsDecoderStateMachine* nsRawDecoder::CreateStateMachine()
 {
-  return new nsBuiltinDecoderStateMachine(this, new nsRawReader(this));
+  return new nsBuiltinDecoderStateMachine(this, new nsRawReader(this), PR_TRUE);
 }
--- a/content/media/raw/nsRawReader.cpp
+++ b/content/media/raw/nsRawReader.cpp
@@ -38,18 +38,16 @@
  * ***** END LICENSE BLOCK ***** */
 
 #include "nsBuiltinDecoderStateMachine.h"
 #include "nsBuiltinDecoder.h"
 #include "nsRawReader.h"
 #include "nsRawDecoder.h"
 #include "VideoUtils.h"
 
-static const PRUint24 RAW_ID = 0x595556;
-
 nsRawReader::nsRawReader(nsBuiltinDecoder* aDecoder)
   : nsBuiltinDecoderReader(aDecoder),
     mCurrentFrame(0), mFrameSize(0)
 {
   MOZ_COUNT_CTOR(nsRawReader);
 }
 
 nsRawReader::~nsRawReader()
--- a/content/media/raw/nsRawReader.h
+++ b/content/media/raw/nsRawReader.h
@@ -36,61 +36,17 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #if !defined(nsRawReader_h_)
 #define nsRawReader_h_
 
 #include "nsBuiltinDecoderReader.h"
-
-struct nsRawVideo_PRUint24 {
-  operator PRUint32() const { return value[2] << 16 | value[1] << 8 | value[0]; }
-private:
-  PRUint8 value[3];
-};
-
-struct nsRawPacketHeader {
-  typedef nsRawVideo_PRUint24 PRUint24;
-  PRUint8 packetID;
-  PRUint24 codecID;
-};
-
-// This is Arc's draft from wiki.xiph.org/OggYUV
-struct nsRawVideoHeader {
-  typedef nsRawVideo_PRUint24 PRUint24;
-  PRUint8 headerPacketID;          // Header Packet ID (always 0)
-  PRUint24 codecID;                // Codec identifier (always "YUV")
-  PRUint8 majorVersion;            // Version Major (breaks backwards compat)
-  PRUint8 minorVersion;            // Version Minor (preserves backwards compat)
-  PRUint16 options;                // Bit 1: Color (false = B/W)
-                                   // Bits 2-4: Chroma Pixel Shape
-                                   // Bit 5: 50% horizontal offset for Cr samples
-                                   // Bit 6: 50% vertical ...
-                                   // Bits 7-8: Chroma Blending
-                                   // Bit 9: Packed (false = Planar)
-                                   // Bit 10: Cr Staggered Horizontally
-                                   // Bit 11: Cr Staggered Vertically
-                                   // Bit 12: Unused (format is always little endian)
-                                   // Bit 13: Interlaced (false = Progressive)
-                                   // Bits 14-16: Interlace options (undefined)
-
-  PRUint8 alphaChannelBpp;
-  PRUint8 lumaChannelBpp;
-  PRUint8 chromaChannelBpp;
-  PRUint8 colorspace;
-
-  PRUint24 frameWidth;
-  PRUint24 frameHeight;
-  PRUint24 aspectNumerator;
-  PRUint24 aspectDenominator;
-
-  PRUint32 framerateNumerator;
-  PRUint32 framerateDenominator;
-};
+#include "nsRawStructs.h"
 
 class nsRawReader : public nsBuiltinDecoderReader
 {
 public:
   nsRawReader(nsBuiltinDecoder* aDecoder);
   ~nsRawReader();
 
   virtual nsresult Init(nsBuiltinDecoderReader* aCloneDonor);
new file mode 100644
--- /dev/null
+++ b/content/media/raw/nsRawStructs.h
@@ -0,0 +1,94 @@
+/* ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is Mozilla code.
+ *
+ * The Initial Developer of the Original Code is 
+ *   Mozilla Foundation.
+ * Portions created by the Initial Developer are Copyright (C) 2010
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *  Brad Lassey
+ *  Kyle Huey <me@kylehuey.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#if !defined(nsRawStructs_h_)
+#define nsRawStructs_h_
+
+static const PRUint32 RAW_ID = 0x595556;
+
+struct nsRawVideo_PRUint24 {
+  operator PRUint32() const { return value[2] << 16 | value[1] << 8 | value[0]; }
+  nsRawVideo_PRUint24& operator= (const PRUint32& rhs)
+  { value[2] = (rhs & 0x00FF0000) >> 16;
+    value[1] = (rhs & 0x0000FF00) >> 8;
+    value[0] = (rhs & 0x000000FF);
+    return *this; }
+private:
+  PRUint8 value[3];
+};
+
+struct nsRawPacketHeader {
+  typedef nsRawVideo_PRUint24 PRUint24;
+  PRUint8 packetID;
+  PRUint24 codecID;
+};
+
+// This is Arc's draft from wiki.xiph.org/OggYUV
+struct nsRawVideoHeader {
+  typedef nsRawVideo_PRUint24 PRUint24;
+  PRUint8 headerPacketID;          // Header Packet ID (always 0)
+  PRUint24 codecID;                // Codec identifier (always "YUV")
+  PRUint8 majorVersion;            // Version Major (breaks backwards compat)
+  PRUint8 minorVersion;            // Version Minor (preserves backwards compat)
+  PRUint16 options;                // Bit 1: Color (false = B/W)
+                                   // Bits 2-4: Chroma Pixel Shape
+                                   // Bit 5: 50% horizontal offset for Cr samples
+                                   // Bit 6: 50% vertical ...
+                                   // Bits 7-8: Chroma Blending
+                                   // Bit 9: Packed (false = Planar)
+                                   // Bit 10: Cr Staggered Horizontally
+                                   // Bit 11: Cr Staggered Vertically
+                                   // Bit 12: Unused (format is always little endian)
+                                   // Bit 13: Interlaced (false = Progressive)
+                                   // Bits 14-16: Interlace options (undefined)
+
+  PRUint8 alphaChannelBpp;
+  PRUint8 lumaChannelBpp;
+  PRUint8 chromaChannelBpp;
+  PRUint8 colorspace;
+
+  PRUint24 frameWidth;
+  PRUint24 frameHeight;
+  PRUint24 aspectNumerator;
+  PRUint24 aspectDenominator;
+
+  PRUint32 framerateNumerator;
+  PRUint32 framerateDenominator;
+};
+
+#endif // nsRawStructs_h_
\ No newline at end of file
--- a/embedding/android/AndroidManifest.xml.in
+++ b/embedding/android/AndroidManifest.xml.in
@@ -25,16 +25,20 @@
     <uses-permission android:name="android.permission.CHANGE_NETWORK_STATE"/> 
     <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
     <uses-permission android:name="android.permission.CHANGE_NETWORK_STATE"/>
 
     <uses-feature android:name="android.hardware.location" android:required="false"/>
     <uses-feature android:name="android.hardware.location.gps" android:required="false"/>
     <uses-feature android:name="android.hardware.touchscreen"/>
 
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-feature android:name="android.hardware.camera" />
+    <uses-feature android:name="android.hardware.camera.autofocus" />
+ 
     <application android:label="@MOZ_APP_DISPLAYNAME@"
 		 android:icon="@drawable/icon"
 #if MOZILLA_OFFICIAL
 		 android:debuggable="false">
 #else
 		 android:debuggable="true">
 #endif
 
--- a/embedding/android/GeckoAppShell.java
+++ b/embedding/android/GeckoAppShell.java
@@ -1599,9 +1599,100 @@ public class GeckoAppShell
             GeckoAppShell.executeNextRunnable();
         }
     }
 
     public static void postToJavaThread(boolean mainThread) {
         Log.i("GeckoShell", "post to " + (mainThread ? "main " : "") + "java thread");
         getMainHandler().post(new GeckoRunnableCallback());
     }
+    
+    public static android.hardware.Camera sCamera = null;
+    
+    static native void cameraCallbackBridge(byte[] data);
+
+    static int kPreferedFps = 25;
+    static byte[] sCameraBuffer = null;
+ 
+    static int[] initCamera(String aContentType, int aCamera, int aWidth, int aHeight) {
+        Log.i("GeckoAppJava", "initCamera(" + aContentType + ", " + aWidth + "x" + aHeight + ") on thread " + Thread.currentThread().getId());
+
+        // [0] = 0|1 (failure/success)
+        // [1] = width
+        // [2] = height
+        // [3] = fps
+        int[] result = new int[4];
+        result[0] = 0;
+
+        if (Build.VERSION.SDK_INT >= 9) {
+            if (android.hardware.Camera.getNumberOfCameras() == 0)
+                return result;
+        }
+
+        try {
+            sCamera = android.hardware.Camera.open(aCamera);
+            android.hardware.Camera.Parameters params = sCamera.getParameters();
+            params.setPreviewFormat(ImageFormat.NV21);
+
+            // use the preview fps closest to 25 fps.
+            int fpsDelta = 1000;
+            try {
+                Iterator<Integer> it = params.getSupportedPreviewFrameRates().iterator();
+                while (it.hasNext()) {
+                    int nFps = it.next();
+                    if (Math.abs(nFps - kPreferedFps) < fpsDelta) {
+                        fpsDelta = Math.abs(nFps - kPreferedFps);
+                        params.setPreviewFrameRate(nFps);
+                    }
+                }
+            } catch(Exception e) {
+                params.setPreviewFrameRate(kPreferedFps);
+            }
+
+            // set up the closest preview size available
+            Iterator<android.hardware.Camera.Size> sit = params.getSupportedPreviewSizes().iterator();
+            int sizeDelta = 10000000;
+            int bufferSize = 0;
+            while (sit.hasNext()) {
+                android.hardware.Camera.Size size = sit.next();
+                if (Math.abs(size.width * size.height - aWidth * aHeight) < sizeDelta) {
+                    sizeDelta = Math.abs(size.width * size.height - aWidth * aHeight);
+                    params.setPreviewSize(size.width, size.height);
+                    bufferSize = size.width * size.height;
+                }
+            }
+            
+            sCamera.setParameters(params);
+            sCameraBuffer = new byte[(bufferSize * 12) / 8];
+            sCamera.addCallbackBuffer(sCameraBuffer);
+            sCamera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
+                public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
+                    cameraCallbackBridge(data);
+                    sCamera.addCallbackBuffer(sCameraBuffer);
+                }
+            });
+            sCamera.startPreview();
+            params = sCamera.getParameters();
+            Log.i("GeckoAppJava", "Camera: " + params.getPreviewSize().width + "x" + params.getPreviewSize().height +
+                  " @ " + params.getPreviewFrameRate() + "fps. format is " + params.getPreviewFormat());
+            result[0] = 1;
+            result[1] = params.getPreviewSize().width;
+            result[2] = params.getPreviewSize().height;
+            result[3] = params.getPreviewFrameRate();
+
+            Log.i("GeckoAppJava", "Camera preview started");
+        } catch(RuntimeException e) {
+            Log.e("GeckoAppJava", "initCamera RuntimeException : ", e);
+            result[0] = result[1] = result[2] = result[3] = 0;
+        }
+        return result;
+    }
+
+    static synchronized void closeCamera() {
+        Log.i("GeckoAppJava", "closeCamera() on thread " + Thread.currentThread().getId());
+        if (sCamera != null) {
+            sCamera.stopPreview();
+            sCamera.release();
+            sCamera = null;
+            sCameraBuffer = null;
+        }
+    }
 }
--- a/mobile/app/mobile.js
+++ b/mobile/app/mobile.js
@@ -673,8 +673,12 @@ pref("browser.firstrun.show.localepicker
 //
 // On Android, you also need to do the following for the output
 // to show up in logcat:
 //
 // $ adb shell stop
 // $ adb shell setprop log.redirect-stdio true
 // $ adb shell start
 pref("browser.dom.window.dump.enabled", false);
+
+// controls if we want camera support
+pref("device.camera.enabled", true);
+pref("media.realtime_decoder.enabled", true);
--- a/mobile/confvars.sh
+++ b/mobile/confvars.sh
@@ -51,14 +51,19 @@ MOZ_DISABLE_DOMCRYPTO=1
 
 if test "$LIBXUL_SDK"; then
 MOZ_XULRUNNER=1
 else
 MOZ_XULRUNNER=
 MOZ_PLACES=1
 fi
 
+if test "$OS_TARGET" = "Android"; then
+MOZ_CAPTURE=1
+MOZ_RAW=1
+fi
+
 # Needed for building our components as part of libxul
 MOZ_APP_COMPONENT_LIBS="browsercomps"
 MOZ_APP_COMPONENT_INCLUDE=nsBrowserComponents.h
 
 # use custom widget for html:select
 MOZ_USE_NATIVE_POPUP_WINDOWS=1
new file mode 100644
--- /dev/null
+++ b/netwerk/protocol/device/AndroidCaptureProvider.cpp
@@ -0,0 +1,333 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is Camera.
+ *
+ * The Initial Developer of the Original Code is Mozilla Corporation
+ * Portions created by the Initial Developer are Copyright (C) 2009
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *  Fabrice Desré <fabrice@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#include "base/basictypes.h"
+#include "AndroidCaptureProvider.h"
+#include "nsXULAppAPI.h"
+#include "AndroidBridge.h"
+#include "nsStreamUtils.h"
+#include "nsThreadUtils.h"
+#include "nsMemory.h"
+#include "nsRawStructs.h"
+
+// The maximum number of frames we keep in our queue. Don't live in the past.
+#define MAX_FRAMES_QUEUED 10
+
+using namespace mozilla::net;
+
+NS_IMPL_THREADSAFE_ISUPPORTS2(AndroidCameraInputStream, nsIInputStream, nsIAsyncInputStream)
+
+// The stream starts out "closed" until Init() succeeds. Before the first
+// frame arrives only the raw video header is advertised as available.
+AndroidCameraInputStream::AndroidCameraInputStream() :
+  mWidth(0), mHeight(0), mCamera(0), mHeaderSent(false), mClosed(true), mFrameSize(0),
+  mMonitor("AndroidCamera.Monitor")
+{
+  mAvailable = sizeof(nsRawVideoHeader);
+  mFrameQueue = new nsDeque();
+}
+
+AndroidCameraInputStream::~AndroidCameraInputStream() {
+  // Clear the frame queue; each entry was allocated with nsMemory::Alloc
+  // in ReceiveFrame() and is owned by this stream.
+  while (mFrameQueue->GetSize() > 0) {
+    nsMemory::Free(mFrameQueue->PopFront());
+  }
+  delete mFrameQueue;
+}
+
+/**
+ * Binds this stream to camera |aParams->camera| and starts the preview.
+ * The camera may negotiate a different size than requested; the negotiated
+ * width/height are stored so the raw header describes the actual frames.
+ */
+NS_IMETHODIMP
+AndroidCameraInputStream::Init(nsACString& aContentType, nsCaptureParams* aParams)
+{
+  if (XRE_GetProcessType() != GeckoProcessType_Default)
+    return NS_ERROR_NOT_IMPLEMENTED;
+
+  mContentType = aContentType;
+  mWidth = aParams->width;
+  mHeight = aParams->height;
+  mCamera = aParams->camera;
+
+  CameraStreamImpl *impl = CameraStreamImpl::GetInstance(0);
+  if (!impl)
+    return NS_ERROR_OUT_OF_MEMORY;
+  if (impl->Init(mContentType, mCamera, mWidth, mHeight, this)) {
+    mWidth = impl->GetWidth();
+    mHeight = impl->GetHeight();
+    mClosed = false;
+  }
+  // Propagate failure instead of returning NS_OK with a closed stream that
+  // would never produce any data.
+  return mClosed ? NS_ERROR_FAILURE : NS_OK;
+}
+
+/**
+ * Called on the Java camera-callback thread with one preview frame.
+ * Packs the frame into a raw-format packet (nsRawPacketHeader + planar YUV)
+ * and queues it for ReadSegments().
+ *
+ * Assumes the preview data is NV21-style: a full-resolution Y plane
+ * followed by an interleaved chroma plane at quarter resolution —
+ * TODO confirm against the Java preview format.
+ */
+void AndroidCameraInputStream::ReceiveFrame(char* frame, PRUint32 length) {
+  {
+    mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);
+    if (mFrameQueue->GetSize() > MAX_FRAMES_QUEUED) {
+      // Drop the oldest frame so a slow consumer never falls too far behind.
+      nsMemory::Free(mFrameQueue->PopFront());
+      mAvailable -= mFrameSize;
+    }
+  }
+
+  mFrameSize = sizeof(nsRawPacketHeader) + length;
+
+  char* fullFrame = (char*)nsMemory::Alloc(mFrameSize);
+
+  if (!fullFrame)
+    return;
+
+  nsRawPacketHeader* header = (nsRawPacketHeader*) fullFrame;
+  header->packetID = 0xFF;
+  header->codecID = 0x595556; // "YUV"
+
+  // Copy the Y plane and de-interleave the chroma into separate planes.
+  PRUint32 yFrameSize = mWidth * mHeight;
+  PRUint32 uvFrameSize = yFrameSize / 4;
+
+  // All planes live *after* the packet header. The previous code computed
+  // the chroma destinations from fullFrame without the header offset, which
+  // overwrote the tail of the Y plane and left the packet tail
+  // uninitialized.
+  char* planes = fullFrame + sizeof(nsRawPacketHeader);
+  memcpy(planes, frame, yFrameSize);
+
+  char* uFrame = planes + yFrameSize;
+  char* vFrame = planes + yFrameSize + uvFrameSize;
+  char* yFrame = frame + yFrameSize;
+  for (PRUint32 i = 0; i < uvFrameSize; i++) {
+    uFrame[i] = yFrame[2 * i + 1];
+    vFrame[i] = yFrame[2 * i];
+  }
+
+  {
+    mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);
+    mAvailable += mFrameSize;
+    mFrameQueue->Push((void*)fullFrame);
+  }
+
+  NotifyListeners();
+}
+
+// Reports the bytes ready to read: the (not yet sent) raw video header plus
+// every queued frame. Guarded by mMonitor because ReceiveFrame() updates
+// mAvailable from the camera callback thread.
+NS_IMETHODIMP
+AndroidCameraInputStream::Available(PRUint32 *aAvailable)
+{
+  mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);
+
+  *aAvailable = mAvailable;
+
+  return NS_OK;
+}
+
+// This stream never blocks; ReadSegments() returns
+// NS_BASE_STREAM_WOULD_BLOCK when no data is queued.
+NS_IMETHODIMP AndroidCameraInputStream::IsNonBlocking(PRBool *aNonBlock) {
+  *aNonBlock = PR_TRUE;
+  return NS_OK;
+}
+
+// Read() is a thin wrapper over ReadSegments() using the stock
+// copy-into-buffer segment writer.
+NS_IMETHODIMP AndroidCameraInputStream::Read(char *aBuffer, PRUint32 aCount, PRUint32 *aRead NS_OUTPARAM) {
+  return ReadSegments(NS_CopySegmentToBuffer, aBuffer, aCount, aRead);
+}
+
+// Streams the raw-format data to the caller: on the first read a
+// synthesized nsRawVideoHeader, then whole frames only (a frame is never
+// split across reads — a partial write pushes the frame back).
+// NOTE(review): mAvailable is read and decremented in the header path
+// without holding mMonitor while ReceiveFrame() mutates it from the camera
+// thread — confirm this race is acceptable or widen the monitor scope.
+NS_IMETHODIMP AndroidCameraInputStream::ReadSegments(nsWriteSegmentFun aWriter, void *aClosure, PRUint32 aCount, PRUint32 *aRead NS_OUTPARAM) {
+  *aRead = 0;
+  
+  nsresult rv;
+
+  if (mAvailable == 0)
+    return NS_BASE_STREAM_WOULD_BLOCK;
+  
+  if (aCount > mAvailable)
+    aCount = mAvailable;
+
+  if (!mHeaderSent) {
+    CameraStreamImpl *impl = CameraStreamImpl::GetInstance(0);
+    nsRawVideoHeader header;
+    header.headerPacketID = 0;
+    header.codecID = 0x595556; // "YUV"
+    header.majorVersion = 0;
+    header.minorVersion = 1;
+    header.options = 1 | 1 << 1; // color, 4:2:2 — NOTE(review): ReceiveFrame
+                                 // packs 4:2:0-sized chroma planes; confirm
+                                 // these flags against nsRawStructs.h
+
+    header.alphaChannelBpp = 0;
+    header.lumaChannelBpp = 8;
+    header.chromaChannelBpp = 4;
+    header.colorspace = 1;
+
+    header.frameWidth = mWidth;
+    header.frameHeight = mHeight;
+    header.aspectNumerator = 1;
+    header.aspectDenominator = 1;
+
+    header.framerateNumerator = impl->GetFps();
+    header.framerateDenominator = 1;
+
+    // NOTE(review): a partial header write is not handled; this assumes the
+    // writer either consumes the whole header or fails — confirm.
+    rv = aWriter(this, aClosure, (const char*)&header, 0, sizeof(nsRawVideoHeader), aRead);
+   
+    if (NS_FAILED(rv))
+      return NS_OK;
+    
+    mHeaderSent = true;
+    aCount -= sizeof(nsRawVideoHeader);
+    mAvailable -= sizeof(nsRawVideoHeader);
+  }
+  
+  {
+    mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);
+    while ((mAvailable > 0) && (aCount >= mFrameSize)) {
+      PRUint32 readThisTime = 0;
+
+      char* frame = (char*)mFrameQueue->PopFront();
+      rv = aWriter(this, aClosure, (const char*)frame, *aRead, mFrameSize, &readThisTime);
+
+      if (readThisTime != mFrameSize) {
+        // Frame was not fully consumed: requeue it and stop without error.
+        mFrameQueue->PushFront((void*)frame);
+        return NS_OK;
+      }
+  
+      // nsRawReader does a copy when calling VideoData::Create()
+      nsMemory::Free(frame);
+  
+      if (NS_FAILED(rv))
+        return NS_OK;
+
+      aCount -= readThisTime;
+      mAvailable -= readThisTime;
+      *aRead += readThisTime;
+    }
+  }
+  return NS_OK;
+}
+
+// nsIInputStream::Close — delegates to CloseWithStatus with a success code.
+NS_IMETHODIMP AndroidCameraInputStream::Close() {
+  return CloseWithStatus(NS_OK);
+}
+
+
+/**
+ * Tears down the underlying camera stream.
+ * Must be called on the main (java) thread.
+ */
+void AndroidCameraInputStream::doClose() {
+  NS_ASSERTION(!mClosed, "Camera is already closed");
+
+  CameraStreamImpl *impl = CameraStreamImpl::GetInstance(0);
+  impl->Close();
+  mClosed = true;
+}
+
+
+// Fires the pending AsyncWait() callback once at least one full frame is
+// queued behind the header (mAvailable strictly greater than the header
+// size). Called from both ReceiveFrame() and AsyncWait().
+void AndroidCameraInputStream::NotifyListeners() {
+  mozilla::ReentrantMonitorAutoEnter autoMonitor(mMonitor);
+  
+  if (mCallback && (mAvailable > sizeof(nsRawVideoHeader))) {
+    nsCOMPtr<nsIInputStreamCallback> callback;
+    if (mCallbackTarget) {
+      // Dispatch the notification to the requested event target.
+      NS_NewInputStreamReadyEvent(getter_AddRefs(callback), mCallback, mCallbackTarget);
+    } else {
+      callback = mCallback;
+    }
+
+    NS_ASSERTION(callback, "Shouldn't fail to make the callback!");
+
+    // Null the callback first because OnInputStreamReady may reenter AsyncWait
+    mCallback = nsnull;
+    mCallbackTarget = nsnull;
+
+    callback->OnInputStreamReady(this);
+  }
+}
+
+/**
+ * nsIAsyncInputStream::AsyncWait — registers a callback to be notified when
+ * data (at least one frame) is available. Only one waiter at a time.
+ */
+NS_IMETHODIMP AndroidCameraInputStream::AsyncWait(nsIInputStreamCallback *aCallback, PRUint32 aFlags, PRUint32 aRequestedCount, nsIEventTarget *aTarget)
+{
+  if (aFlags != 0)
+    return NS_ERROR_NOT_IMPLEMENTED;
+
+  if (!aCallback) {
+    // Per the nsIAsyncInputStream contract, a null callback cancels any
+    // pending wait rather than being an error.
+    mCallback = nsnull;
+    mCallbackTarget = nsnull;
+    return NS_OK;
+  }
+
+  if (mCallback || mCallbackTarget)
+    return NS_ERROR_UNEXPECTED;
+
+  mCallbackTarget = aTarget;
+  mCallback = aCallback;
+
+  // What we are being asked for may be present already
+  NotifyListeners();
+  return NS_OK;
+}
+
+
+// nsIAsyncInputStream::CloseWithStatus. |status| is currently ignored.
+// NOTE(review): doClose() documents that it must run on the main (java)
+// thread — confirm all callers honor that.
+NS_IMETHODIMP AndroidCameraInputStream::CloseWithStatus(PRUint32 status)
+{
+  // Guard against double close: doClose() asserts !mClosed.
+  if (!mClosed)
+    doClose();
+  return NS_OK;
+}
+
+/**
+ * AndroidCaptureProvider implementation
+ */
+
+NS_IMPL_THREADSAFE_ISUPPORTS0(AndroidCaptureProvider)
+
+// Process-wide singleton, created lazily by GetAndroidCaptureProvider() and
+// cleared when the last reference goes away.
+AndroidCaptureProvider* AndroidCaptureProvider::sInstance = NULL;
+
+AndroidCaptureProvider::AndroidCaptureProvider() {
+}
+
+AndroidCaptureProvider::~AndroidCaptureProvider() {
+  AndroidCaptureProvider::sInstance = NULL;
+}
+
+/**
+ * Creates a camera stream for the requested content type.
+ * Currently only "video/x-raw-yuv" is supported; any other type fails with
+ * NS_ERROR_NOT_IMPLEMENTED.
+ */
+nsresult AndroidCaptureProvider::Init(nsACString& aContentType,
+                        nsCaptureParams* aParams,
+                        nsIInputStream** aStream) {
+
+  NS_ENSURE_ARG_POINTER(aParams);
+
+  NS_ASSERTION(aParams->frameLimit == 0 || aParams->timeLimit == 0,
+    "Cannot set both a frame limit and a time limit!");
+
+  nsRefPtr<AndroidCameraInputStream> stream;
+
+  if (aContentType.EqualsLiteral("video/x-raw-yuv")) {
+    stream = new AndroidCameraInputStream();
+    if (stream) {
+      nsresult rv = stream->Init(aContentType, aParams);
+      if (NS_FAILED(rv))
+        return rv;
+    }
+    else
+      return NS_ERROR_OUT_OF_MEMORY;
+  } else {
+    NS_NOTREACHED("Should not have asked Android for this type!");
+    // NS_NOTREACHED is a no-op in release builds; bail out here instead of
+    // falling through to CallQueryInterface with a null stream.
+    return NS_ERROR_NOT_IMPLEMENTED;
+  }
+  return CallQueryInterface(stream, aStream);
+}
+
+// Returns an addref'ed pointer to the lazily-created provider singleton.
+// NOTE(review): creation is not thread-safe; presumably only reached from
+// the main thread via nsDeviceChannel — confirm.
+already_AddRefed<AndroidCaptureProvider> GetAndroidCaptureProvider() {
+  if (!AndroidCaptureProvider::sInstance) {
+    AndroidCaptureProvider::sInstance = new AndroidCaptureProvider();
+  }
+  AndroidCaptureProvider::sInstance->AddRef();
+  return AndroidCaptureProvider::sInstance;
+}
new file mode 100644
--- /dev/null
+++ b/netwerk/protocol/device/AndroidCaptureProvider.h
@@ -0,0 +1,96 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is Camera.
+ *
+ * The Initial Developer of the Original Code is Mozilla Corporation
+ * Portions created by the Initial Developer are Copyright (C) 2009
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *  Fabrice Desré <fabrice@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#ifndef AndroidDeviceCaptureProvide_h_
+#define AndroidDeviceCaptureProvide_h_
+
+#include "nsDeviceCaptureProvider.h"
+#include "nsIAsyncInputStream.h"
+#include "nsCOMPtr.h"
+#include "nsAutoPtr.h"
+#include "nsString.h"
+#include "mozilla/net/CameraStreamImpl.h"
+#include "nsIEventTarget.h"
+#include "nsDeque.h"
+#include "mozilla/ReentrantMonitor.h"
+
+// Capture provider backing device: channels on Android; hands out
+// AndroidCameraInputStream instances for supported content types.
+class AndroidCaptureProvider : public nsDeviceCaptureProvider {
+  public:
+    AndroidCaptureProvider();
+    ~AndroidCaptureProvider();
+
+    NS_DECL_ISUPPORTS
+
+    nsresult Init(nsACString& aContentType, nsCaptureParams* aParams, nsIInputStream** aStream);
+    // Lazily-created singleton, managed by GetAndroidCaptureProvider().
+    static AndroidCaptureProvider* sInstance;
+};
+
+// Async input stream that serializes Android camera preview frames into the
+// "raw" video format (one nsRawVideoHeader followed by packetized frames).
+class AndroidCameraInputStream : public nsIAsyncInputStream, mozilla::net::CameraStreamImpl::FrameCallback {
+  public:
+    AndroidCameraInputStream();
+    ~AndroidCameraInputStream();
+
+    NS_IMETHODIMP Init(nsACString& aContentType, nsCaptureParams* aParams);
+
+    NS_DECL_ISUPPORTS
+    NS_DECL_NSIINPUTSTREAM
+    NS_DECL_NSIASYNCINPUTSTREAM
+
+    // FrameCallback: invoked with one preview frame from the camera.
+    void ReceiveFrame(char* frame, PRUint32 length);
+
+  protected:
+    void NotifyListeners();
+    void doClose();
+
+    PRUint32 mAvailable;   // bytes ready to read (header + queued frames)
+    nsCString mContentType;
+    PRUint32 mWidth;       // negotiated frame width
+    PRUint32 mHeight;      // negotiated frame height
+    PRUint32 mCamera;      // camera index from nsCaptureParams
+    bool mHeaderSent;      // has the raw video header been delivered?
+    bool mClosed;
+    nsDeque *mFrameQueue;  // heap-allocated packets owned by this stream
+    PRUint32 mFrameSize;   // size in bytes of one packetized frame
+    mozilla::ReentrantMonitor mMonitor; // guards mAvailable / mFrameQueue
+
+    nsCOMPtr<nsIInputStreamCallback> mCallback;  // pending AsyncWait waiter
+    nsCOMPtr<nsIEventTarget> mCallbackTarget;
+};
+
+already_AddRefed<AndroidCaptureProvider> GetAndroidCaptureProvider();
+
+#endif
new file mode 100644
--- /dev/null
+++ b/netwerk/protocol/device/CameraStreamImpl.cpp
@@ -0,0 +1,128 @@
+/* ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is Camera.
+ *
+ * The Initial Developer of the Original Code is Mozilla Corporation
+ * Portions created by the Initial Developer are Copyright (C) 2009
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *  Fabrice Desré <fabrice@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#include "CameraStreamImpl.h"
+#include "nsCRTGlue.h"
+#include "nsThreadUtils.h"
+#include "nsXULAppAPI.h"
+#include "mozilla/Monitor.h"
+
+/**
+ * JNI part & helper runnable
+ */
+
+extern "C" {
+    NS_EXPORT void JNICALL Java_org_mozilla_gecko_GeckoAppShell_cameraCallbackBridge(JNIEnv *, jclass, jbyteArray data);
+}
+
+// Entry point called from Java (GeckoAppShell.cameraCallbackBridge) with one
+// preview frame; forwards it to the camera-0 stream implementation.
+NS_EXPORT void JNICALL
+Java_org_mozilla_gecko_GeckoAppShell_cameraCallbridge_renamed_do_not_use(JNIEnv *env, jclass, jbyteArray data) {
+    mozilla::net::CameraStreamImpl* impl = mozilla::net::CameraStreamImpl::GetInstance(0);
+    
+    impl->transmitFrame(env, &data);
+}
+
+using namespace mozilla;
+
+namespace mozilla {
+namespace net {
+
+// Per-camera singleton instances, created lazily by GetInstance().
+// NOTE(review): the 'm' prefix suggests members but these are file-scope
+// statics; an 's' prefix would match convention.
+static CameraStreamImpl* mCamera0 = NULL;
+static CameraStreamImpl* mCamera1 = NULL;
+
+/**
+ * CameraStreamImpl
+ */
+
+// Delivers one preview frame from Java to the registered FrameCallback.
+// Runs on the Java callback thread.
+void CameraStreamImpl::transmitFrame(JNIEnv *env, jbyteArray *data) {
+    // Close() clears mCallback; a frame already in flight must not be
+    // delivered into a dead consumer.
+    if (!mCallback)
+        return;
+    jboolean isCopy;
+    jbyte* jFrame = env->GetByteArrayElements(*data, &isCopy);
+    PRUint32 length = env->GetArrayLength(*data);
+    if (length > 0) {
+        mCallback->ReceiveFrame((char*)jFrame, length);
+    }
+    // The buffer is never modified, so JNI_ABORT releases it without the
+    // needless copy-back that mode 0 performs.
+    env->ReleaseByteArrayElements(*data, jFrame, JNI_ABORT);
+}
+
+// Returns the lazily-created singleton for camera |aCamera| (0 or 1), or
+// NULL for any other index. NOTE(review): not thread-safe; confirm callers
+// serialize access on a single thread.
+CameraStreamImpl* CameraStreamImpl::GetInstance(PRUint32 aCamera) {
+    CameraStreamImpl* res = NULL;
+    switch(aCamera) {
+        case 0:
+            if (mCamera0)
+                res = mCamera0;
+            else
+                res = mCamera0 = new CameraStreamImpl(aCamera);
+            break;
+        case 1:
+            if (mCamera1)
+                res = mCamera1;
+            else
+                res = mCamera1 = new CameraStreamImpl(aCamera);
+            break;
+    }
+    return res;
+}
+
+
+// Dimensions and fps stay zero until Init() negotiates them with Java.
+CameraStreamImpl::CameraStreamImpl(PRUint32 aCamera) :
+ mInit(false), mCamera(aCamera)
+{
+    NS_WARNING("CameraStreamImpl::CameraStreamImpl()"); // debug trace only
+    mWidth = 0;
+    mHeight = 0;
+    mFps = 0;
+}
+
+CameraStreamImpl::~CameraStreamImpl()
+{
+    NS_WARNING("CameraStreamImpl::~CameraStreamImpl()"); // debug trace only
+}
+
+// Registers the frame callback and asks the Java side (via AndroidBridge)
+// to open the camera and start the preview. Returns true on success; the
+// negotiated width/height/fps are then available via the getters.
+bool CameraStreamImpl::Init(const nsCString& contentType, const PRUint32& camera, const PRUint32& width, const PRUint32& height, FrameCallback* aCallback)
+{
+    // The bridge may not exist in some startup/shutdown paths.
+    AndroidBridge* bridge = AndroidBridge::Bridge();
+    if (!bridge)
+        return false;
+    mCallback = aCallback;
+    mWidth = width;
+    mHeight = height;
+    // Record success so initNeeded() stops reporting true (the original
+    // never set mInit at all).
+    mInit = bridge->InitCamera(contentType, camera, &mWidth, &mHeight, &mFps);
+    return mInit;
+}
+
+// Stops the Java-side preview and drops the frame callback so that
+// transmitFrame() cannot deliver into a dead consumer.
+void CameraStreamImpl::Close() {
+    AndroidBridge* bridge = AndroidBridge::Bridge();
+    if (bridge)
+        bridge->CloseCamera();
+    mCallback = NULL;
+    // Allow a later re-init of this instance.
+    mInit = false;
+}
+
+} // namespace net
+} // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/netwerk/protocol/device/CameraStreamImpl.h
@@ -0,0 +1,100 @@
+/* ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is Camera.
+ *
+ * The Initial Developer of the Original Code is Mozilla Corporation
+ * Portions created by the Initial Developer are Copyright (C) 2009
+ * the Initial Developer. All Rights Reserved.
+ *
+ * Contributor(s):
+ *  Fabrice Desré <fabrice@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either the GNU General Public License Version 2 or later (the "GPL"), or
+ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+#ifndef __CAMERASTREAMIMPL_H__
+#define __CAMERASTREAMIMPL_H__
+
+#include "nsString.h"
+#include "AndroidBridge.h"
+
+/**
+ * This singleton class handles communication with the Android camera
+ * through JNI. It is used by the IPDL parent or directly from the chrome process
+ */
+
+namespace mozilla {
+namespace net {
+    
+class CameraStreamImpl {
+public:
+    // Consumers implement this to receive raw preview frames.
+    // NOTE(review): no virtual destructor — fine as long as callbacks are
+    // never deleted through this base; confirm.
+    class FrameCallback {
+    public:
+        virtual void ReceiveFrame(char* frame, PRUint32 length) = 0;
+    };
+    
+    /**
+     * instance bound to a given camera
+     */
+    static CameraStreamImpl* GetInstance(PRUint32 aCamera);
+    
+    // True until Init() records a successful camera start.
+    bool initNeeded() {
+        return !mInit;
+    }
+    
+    FrameCallback* GetFrameCallback() {
+        return mCallback;
+    }
+    
+    bool Init(const nsCString& contentType, const PRUint32& camera, const PRUint32& width, const PRUint32& height, FrameCallback* callback);
+    void Close();
+    
+    // Valid after a successful Init(); reflect the negotiated preview mode.
+    PRUint32 GetWidth() { return mWidth; }
+    PRUint32 GetHeight() { return mHeight; }
+    PRUint32 GetFps() { return mFps; }
+    
+    void takePicture(const nsAString& aFileName);
+    
+    // Called from the JNI bridge with one preview frame.
+    void transmitFrame(JNIEnv *env, jbyteArray *data);
+    
+private:
+    CameraStreamImpl(PRUint32 aCamera);
+    // Non-copyable singleton.
+    CameraStreamImpl(const CameraStreamImpl&);
+    CameraStreamImpl& operator=(const CameraStreamImpl&);
+
+    ~CameraStreamImpl();
+
+    bool mInit;            // successfully initialized?
+    PRUint32 mCamera;      // camera index this instance is bound to
+    PRUint32 mWidth;       // negotiated preview width
+    PRUint32 mHeight;      // negotiated preview height
+    PRUint32 mFps;         // negotiated preview frame rate
+    FrameCallback* mCallback; // non-owning; cleared by Close()
+};
+
+} // namespace net
+} // namespace mozilla
+
+#endif
--- a/netwerk/protocol/device/Makefile.in
+++ b/netwerk/protocol/device/Makefile.in
@@ -46,13 +46,29 @@ LIBRARY_NAME = nkdevice_s
 FORCE_STATIC_LIB = 1
 LIBXUL_LIBRARY = 1
 
 CPPSRCS = \
   nsDeviceChannel.cpp \
   nsDeviceProtocolHandler.cpp \
   $(NULL)
 
+ifeq (Android,$(OS_TARGET))
+CPPSRCS += AndroidCaptureProvider.cpp \
+           CameraStreamImpl.cpp \
+           $(NULL)
+
+EXPORTS_NAMESPACES = mozilla/net
+
+EXPORTS_mozilla/net += \
+  CameraStreamImpl.h  \
+  $(NULL)
+
+endif
+
 LOCAL_INCLUDES = -I$(srcdir)/../../base/src/ \
-                 -I$(srcdir)/../file \
                  $(NULL)
 
+include $(topsrcdir)/config/config.mk
+include $(topsrcdir)/ipc/chromium/chromium-config.mk
 include $(topsrcdir)/config/rules.mk
+
+DEFINES += -DIMPL_NS_NET
--- a/netwerk/protocol/device/nsDeviceCaptureProvider.h
+++ b/netwerk/protocol/device/nsDeviceCaptureProvider.h
@@ -46,16 +46,17 @@ struct nsCaptureParams {
   PRPackedBool captureAudio;
   PRPackedBool captureVideo;
   PRUint32 frameRate;
   PRUint32 frameLimit;
   PRUint32 timeLimit;
   PRUint32 width;
   PRUint32 height;
   PRUint32 bpp;
+  PRUint32 camera;
 };
 
 class nsDeviceCaptureProvider : public nsISupports
 {
 public:
   virtual nsresult Init(nsACString& aContentType,
                         nsCaptureParams* aParams,
                         nsIInputStream** aStream) = 0;
--- a/netwerk/protocol/device/nsDeviceChannel.cpp
+++ b/netwerk/protocol/device/nsDeviceChannel.cpp
@@ -34,16 +34,21 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include "plstr.h"
 #include "nsComponentManagerUtils.h"
 #include "nsDeviceChannel.h"
 #include "nsDeviceCaptureProvider.h"
+#include "mozilla/Preferences.h"
+
+#ifdef ANDROID
+#include "AndroidCaptureProvider.h"
+#endif
 
 // Copied from modules/libpr0n/decoders/icon/nsIconURI.cpp
 // takes a string like ?size=32&contentType=text/html and returns a new string
 // containing just the attribute values. i.e you could pass in this string with
 // an attribute name of "size=", this will return 32
 // Assumption: attribute pairs are separated by &
 void extractAttributeValue(const char* searchString, const char* attributeName, nsCString& result)
 {
@@ -105,20 +110,20 @@ nsDeviceChannel::OpenContentStream(PRBoo
   *aChannel = nsnull;
   NS_NAMED_LITERAL_CSTRING(width, "width=");
   NS_NAMED_LITERAL_CSTRING(height, "height=");
 
   nsCAutoString spec;
   uri->GetSpec(spec);
 
   nsCAutoString type;
-  // Because no capture providers are implemented at the moment
-  // capture will always be null and this function will always fail
+
   nsRefPtr<nsDeviceCaptureProvider> capture;
   nsCaptureParams captureParams;
+  captureParams.camera = 0;
   if (kNotFound != spec.Find(NS_LITERAL_CSTRING("type=image/png"),
                              PR_TRUE,
                              0,
                              -1)) {
     type.AssignLiteral("image/png");
     SetContentType(type);
     captureParams.captureAudio = PR_FALSE;
     captureParams.captureVideo = PR_TRUE;
@@ -130,28 +135,45 @@ nsDeviceChannel::OpenContentStream(PRBoo
     captureParams.width = buffer.ToInteger(&err);
     if (!captureParams.width)
       captureParams.width = 640;
     extractAttributeValue(spec.get(), "height=", buffer);
     captureParams.height = buffer.ToInteger(&err);
     if (!captureParams.height)
       captureParams.height = 480;
     captureParams.bpp = 32;
+#ifdef ANDROID
+    capture = GetAndroidCaptureProvider();
+#endif
   } else if (kNotFound != spec.Find(NS_LITERAL_CSTRING("type=video/x-raw-yuv"),
                                     PR_TRUE,
                                     0,
                                     -1)) {
     type.AssignLiteral("video/x-raw-yuv");
     SetContentType(type);
     captureParams.captureAudio = PR_FALSE;
     captureParams.captureVideo = PR_TRUE;
-    captureParams.width = 640;
-    captureParams.height = 480;
+    nsCAutoString buffer;
+    extractAttributeValue(spec.get(), "width=", buffer);
+    nsresult err;
+    captureParams.width = buffer.ToInteger(&err);
+    if (!captureParams.width)
+      captureParams.width = 640;
+    extractAttributeValue(spec.get(), "height=", buffer);
+    captureParams.height = buffer.ToInteger(&err);
+    if (!captureParams.height)
+      captureParams.height = 480;
     captureParams.bpp = 32;
-    captureParams.frameLimit = 6000;
+    captureParams.timeLimit = 0;
+    captureParams.frameLimit = 60000;
+#ifdef ANDROID
+    // only enable if "device.camera.enabled" is true.
+    if (mozilla::Preferences::GetBool("device.camera.enabled", PR_FALSE) == PR_TRUE)
+      capture = GetAndroidCaptureProvider();
+#endif
   } else {
     return NS_ERROR_NOT_IMPLEMENTED;
   }
 
   if (!capture)
     return NS_ERROR_FAILURE;
 
   return capture->Init(type, &captureParams, aStream);
--- a/other-licenses/android/APKOpen.cpp
+++ b/other-licenses/android/APKOpen.cpp
@@ -237,16 +237,17 @@ SHELL_WRAPPER0(processNextNativeEvent)
 SHELL_WRAPPER1(setSurfaceView, jobject)
 SHELL_WRAPPER0(onResume)
 SHELL_WRAPPER0(onLowMemory)
 SHELL_WRAPPER3(callObserver, jstring, jstring, jstring)
 SHELL_WRAPPER1(removeObserver, jstring)
 SHELL_WRAPPER2(onChangeNetworkLinkStatus, jstring, jstring)
 SHELL_WRAPPER1(reportJavaCrash, jstring)
 SHELL_WRAPPER0(executeNextRunnable)
+SHELL_WRAPPER1(cameraCallbackBridge, jbyteArray)
 
 static void * xul_handle = NULL;
 static time_t apk_mtime = 0;
 #ifdef DEBUG
 extern "C" int extractLibs = 1;
 #else
 extern "C" int extractLibs = 0;
 #endif
@@ -669,16 +670,17 @@ loadLibs(const char *apkName)
   GETFUNC(setSurfaceView);
   GETFUNC(onResume);
   GETFUNC(onLowMemory);
   GETFUNC(callObserver);
   GETFUNC(removeObserver);
   GETFUNC(onChangeNetworkLinkStatus);
   GETFUNC(reportJavaCrash);
   GETFUNC(executeNextRunnable);
+  GETFUNC(cameraCallbackBridge);
 #undef GETFUNC
   gettimeofday(&t1, 0);
   struct rusage usage2;
   getrusage(RUSAGE_SELF, &usage2);
   __android_log_print(ANDROID_LOG_ERROR, "GeckoLibLoad", "Loaded libs in %dms total, %dms user, %dms system, %d faults",
                       (t1.tv_sec - t0.tv_sec)*1000 + (t1.tv_usec - t0.tv_usec)/1000, 
                       (usage2.ru_utime.tv_sec - usage1.ru_utime.tv_sec)*1000 + (usage2.ru_utime.tv_usec - usage1.ru_utime.tv_usec)/1000,
                       (usage2.ru_stime.tv_sec - usage1.ru_stime.tv_sec)*1000 + (usage2.ru_stime.tv_usec - usage1.ru_stime.tv_usec)/1000,
--- a/widget/src/android/AndroidBridge.cpp
+++ b/widget/src/android/AndroidBridge.cpp
@@ -146,16 +146,18 @@ AndroidBridge::Init(JNIEnv *jEnv,
     jGetNetworkLinkType = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "getNetworkLinkType", "()I");
     jSetSelectedLocale = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "setSelectedLocale", "(Ljava/lang/String;)V");
     jScanMedia = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "scanMedia", "(Ljava/lang/String;Ljava/lang/String;)V");
     jGetSystemColors = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "getSystemColors", "()[I");
     jGetIconForExtension = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "getIconForExtension", "(Ljava/lang/String;I)[B");
     jCreateShortcut = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "createShortcut", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
     jGetShowPasswordSetting = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "getShowPasswordSetting", "()Z");
     jPostToJavaThread = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "postToJavaThread", "(Z)V");
+    jInitCamera = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "initCamera", "(Ljava/lang/String;III)[I");
+    jCloseCamera = (jmethodID) jEnv->GetStaticMethodID(jGeckoAppShellClass, "closeCamera", "()V");
 
     jEGLContextClass = (jclass) jEnv->NewGlobalRef(jEnv->FindClass("javax/microedition/khronos/egl/EGLContext"));
     jEGL10Class = (jclass) jEnv->NewGlobalRef(jEnv->FindClass("javax/microedition/khronos/egl/EGL10"));
     jEGLSurfaceImplClass = (jclass) jEnv->NewGlobalRef(jEnv->FindClass("com/google/android/gles_jni/EGLSurfaceImpl"));
     jEGLContextImplClass = (jclass) jEnv->NewGlobalRef(jEnv->FindClass("com/google/android/gles_jni/EGLContextImpl"));
     jEGLConfigImplClass = (jclass) jEnv->NewGlobalRef(jEnv->FindClass("com/google/android/gles_jni/EGLConfigImpl"));
     jEGLDisplayImplClass = (jclass) jEnv->NewGlobalRef(jEnv->FindClass("com/google/android/gles_jni/EGLDisplayImpl"));
 
@@ -1045,16 +1047,48 @@ AndroidBridge::ValidateBitmap(jobject bi
     }
 
     if (info.width != width || info.height != height)
         return false;
 
     return true;
 }
 
+// Asks Java (GeckoAppShell.initCamera) to open the camera and start the
+// preview. On return the negotiated width/height/fps are written through
+// the out-params. Returns false when the Java side failed to start.
+bool
+AndroidBridge::InitCamera(const nsCString& contentType, PRUint32 camera, PRUint32 *width, PRUint32 *height, PRUint32 *fps)
+{
+    AutoLocalJNIFrame jniFrame; // releases the local refs created below
+
+    NS_ConvertASCIItoUTF16 s(contentType);
+    jstring jstrContentType = mJNIEnv->NewString(s.get(), NS_strlen(s.get()));
+    // Java returns int[4]: {success, width, height, fps}.
+    jobject obj = mJNIEnv->CallStaticObjectMethod(mGeckoAppShellClass, jInitCamera, jstrContentType, camera, *width, *height);
+    jintArray arr = static_cast<jintArray>(obj);
+    if (!arr)
+        return false;
+
+    jint *elements = mJNIEnv->GetIntArrayElements(arr, 0);
+
+    *width = elements[1];
+    *height = elements[2];
+    *fps = elements[3];
+
+    bool res = elements[0] == 1;
+
+    mJNIEnv->ReleaseIntArrayElements(arr, elements, 0);
+
+    return res;
+}
+
+// Asks Java (GeckoAppShell.closeCamera) to stop the preview and release
+// the camera.
+void
+AndroidBridge::CloseCamera() {
+    AutoLocalJNIFrame jniFrame;
+
+    mJNIEnv->CallStaticVoidMethod(mGeckoAppShellClass, jCloseCamera);
+}
+
 void *
 AndroidBridge::LockBitmap(jobject bitmap)
 {
     int err;
     void *buf;
 
     if ((err = AndroidBitmap_lockPixels(JNI(), bitmap, &buf)) != 0) {
         ALOG_BRIDGE("AndroidBitmap_lockPixels failed! (error %d)", err);
--- a/widget/src/android/AndroidBridge.h
+++ b/widget/src/android/AndroidBridge.h
@@ -278,16 +278,20 @@ public:
 
     void *AcquireNativeWindow(jobject surface);
     void ReleaseNativeWindow(void *window);
     bool SetNativeWindowFormat(void *window, int format);
 
     bool LockWindow(void *window, unsigned char **bits, int *width, int *height, int *format, int *stride);
     bool UnlockWindow(void *window);
 
+    bool InitCamera(const nsCString& contentType, PRUint32 camera, PRUint32 *width, PRUint32 *height, PRUint32 *fps);
+
+    void CloseCamera();
+
 protected:
     static AndroidBridge *sBridge;
 
     // the global JavaVM
     JavaVM *mJavaVM;
 
     // the JNIEnv for the main thread
     JNIEnv *mJNIEnv;
@@ -347,16 +351,18 @@ protected:
     jmethodID jGetNetworkLinkType;
     jmethodID jSetSelectedLocale;
     jmethodID jScanMedia;
     jmethodID jGetSystemColors;
     jmethodID jGetIconForExtension;
     jmethodID jCreateShortcut;
     jmethodID jGetShowPasswordSetting;
     jmethodID jPostToJavaThread;
+    jmethodID jInitCamera;
+    jmethodID jCloseCamera;
 
     // stuff we need for CallEglCreateWindowSurface
     jclass jEGLSurfaceImplClass;
     jclass jEGLContextImplClass;
     jclass jEGLConfigImplClass;
     jclass jEGLDisplayImplClass;
     jclass jEGLContextClass;
     jclass jEGL10Class;