Bug 776062: Add support for recording video on gonk. r=cjones,double,ikumar,mikeh sr=sicking
author Inder Kumar <ikumar@codeaurora.org> and Mike Habicher <mhabicher@mozilla.com>
Fri, 28 Sep 2012 22:30:52 -0700
changeset 108612 68c4c30ff6f0a8ad3cd37a24dd3db452c24d9f3b
parent 108611 02a62c14ec3b5d72cd014422b7427c2e9b2e5012
child 108613 59665618b6c99e30e014fa5181fc73b6b008c2e3
push id 1134
push user ttaubert@mozilla.com
push date Tue, 02 Oct 2012 08:00:42 +0000
treeherder fx-team@85dd8e346102
reviewers cjones, double, ikumar, mikeh, sicking
bugs 776062
milestone 18.0a1
configure.in
dom/camera/AudioParameter.cpp
dom/camera/CameraControlImpl.cpp
dom/camera/CameraControlImpl.h
dom/camera/DOMCameraControl.cpp
dom/camera/GonkCameraControl.cpp
dom/camera/GonkCameraControl.h
dom/camera/GonkCameraHwMgr.cpp
dom/camera/GonkCameraHwMgr.h
dom/camera/GonkCameraListener.h
dom/camera/GonkCameraSource.cpp
dom/camera/GonkCameraSource.h
dom/camera/GonkRecorder.cpp
dom/camera/GonkRecorder.h
dom/camera/ICameraControl.h
dom/camera/Makefile.in
dom/camera/README
dom/camera/nsIDOMCameraManager.idl
dom/camera/update.patch
dom/camera/update.sh
dom/camera/update2.patch
dom/devicestorage/DeviceStorageRequestChild.cpp
dom/devicestorage/nsDeviceStorage.cpp
dom/interfaces/devicestorage/nsIDOMDeviceStorage.idl
js/xpconnect/src/dictionary_helper_gen.conf
toolkit/library/Makefile.in
--- a/configure.in
+++ b/configure.in
@@ -1,8 +1,9 @@
+
 dnl -*- Mode: Autoconf; tab-width: 4; indent-tabs-mode: nil; -*-
 dnl vi: set tabstop=4 shiftwidth=4 expandtab syntax=m4:
 dnl This Source Code Form is subject to the terms of the Mozilla Public
 dnl License, v. 2.0. If a copy of the MPL was not distributed with this
 dnl file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 dnl Process this file with autoconf to produce a configure script.
 dnl ========================================================
@@ -192,17 +193,17 @@ if test -n "$gonkdir" ; then
     arm)
         ARCH_DIR=arch-arm
         ;;
     i?86)
         ARCH_DIR=arch-x86
         ;;
     esac
 
-    CPPFLAGS="-DANDROID -isystem $gonkdir/bionic/libc/$ARCH_DIR/include -isystem $gonkdir/bionic/libc/include/ -isystem $gonkdir/bionic/libc/kernel/common -isystem $gonkdir/bionic/libc/kernel/$ARCH_DIR -isystem $gonkdir/bionic/libm/include -I$gonkdir/frameworks/base/opengl/include -I$gonkdir/frameworks/base/native/include -I$gonkdir/hardware/libhardware/include -I$gonkdir/hardware/libhardware_legacy/include -I$gonkdir/system -I$gonkdir/system/core/include -isystem $gonkdir/bionic -I$gonkdir/frameworks/base/include -I$gonkdir/external/dbus -I$gonkdir/external/bluetooth/bluez/lib $CPPFLAGS -I$gonkdir/frameworks/base/services/sensorservice -I$gonkdir/frameworks/base/services/camera -I$gonkdir/system/media/wilhelm/include"
+    CPPFLAGS="-DANDROID -isystem $gonkdir/bionic/libc/$ARCH_DIR/include -isystem $gonkdir/bionic/libc/include/ -isystem $gonkdir/bionic/libc/kernel/common -isystem $gonkdir/bionic/libc/kernel/$ARCH_DIR -isystem $gonkdir/bionic/libm/include -I$gonkdir/frameworks/base/opengl/include -I$gonkdir/frameworks/base/native/include -I$gonkdir/hardware/libhardware/include -I$gonkdir/hardware/libhardware_legacy/include -I$gonkdir/system -I$gonkdir/system/core/include -isystem $gonkdir/bionic -I$gonkdir/frameworks/base/include -I$gonkdir/external/dbus -I$gonkdir/external/bluetooth/bluez/lib $CPPFLAGS -I$gonkdir/frameworks/base/services/sensorservice -I$gonkdir/frameworks/base/services/camera -I$gonkdir/system/media/wilhelm/include -I$gonkdir/frameworks/base/include/media/stagefright -I$gonkdir/frameworks/base/include/media/stagefright/openmax -I$gonkdir/frameworks/base/media/libstagefright/rtsp -I$gonkdir/frameworks/base/media/libstagefright/include -I$gonkdir/dalvik/libnativehelper/include/nativehelper"
     CFLAGS="-mandroid -fno-short-enums -fno-exceptions $CFLAGS"
     CXXFLAGS="-mandroid -fno-short-enums -fno-exceptions -Wno-psabi $CXXFLAGS $STLPORT_CPPFLAGS"
     dnl Add -llog by default, since we use it all over the place.
     LIBS="$LIBS -llog $STLPORT_LIBS"
 
     LDFLAGS="-mandroid -L$gonkdir/out/target/product/$GONK_PRODUCT/obj/lib -Wl,-rpath-link=$gonkdir/out/target/product/$GONK_PRODUCT/obj/lib --sysroot=$gonkdir/out/target/product/$GONK_PRODUCT/obj/ $LDFLAGS"
 
     dnl prevent cross compile section from using these flags as host flags
new file mode 100644
--- /dev/null
+++ b/dom/camera/AudioParameter.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2006-2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioParameter"
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+
+#include <media/AudioParameter.h>
+
+namespace android {
+
+const char *AudioParameter::keyRouting = "routing";
+const char *AudioParameter::keySamplingRate = "sampling_rate";
+const char *AudioParameter::keyFormat = "format";
+const char *AudioParameter::keyChannels = "channels";
+const char *AudioParameter::keyFrameCount = "frame_count";
+const char *AudioParameter::keyInputSource = "input_source";
+
+AudioParameter::AudioParameter(const String8& keyValuePairs)
+{
+    char *str = new char[keyValuePairs.length()+1];
+    mKeyValuePairs = keyValuePairs;
+
+    strcpy(str, keyValuePairs.string());
+    char *pair = strtok(str, ";");
+    while (pair != NULL) {
+        if (strlen(pair) != 0) {
+            size_t eqIdx = strcspn(pair, "=");
+            String8 key = String8(pair, eqIdx);
+            String8 value;
+            if (eqIdx == strlen(pair)) {
+                value = String8("");
+            } else {
+                value = String8(pair + eqIdx + 1);
+            }
+            if (mParameters.indexOfKey(key) < 0) {
+                mParameters.add(key, value);
+            } else {
+                mParameters.replaceValueFor(key, value);
+            }
+        } else {
+            LOGV("AudioParameter() cstor empty key value pair");
+        }
+        pair = strtok(NULL, ";");
+    }
+
+    delete[] str;
+}
+
+AudioParameter::~AudioParameter()
+{
+    mParameters.clear();
+}
+
+String8 AudioParameter::toString()
+{
+    String8 str = String8("");
+
+    size_t size = mParameters.size();
+    for (size_t i = 0; i < size; i++) {
+        str += mParameters.keyAt(i);
+        str += "=";
+        str += mParameters.valueAt(i);
+        if (i < (size - 1)) str += ";";
+    }
+    return str;
+}
+
+status_t AudioParameter::add(const String8& key, const String8& value)
+{
+    if (mParameters.indexOfKey(key) < 0) {
+        mParameters.add(key, value);
+        return NO_ERROR;
+    } else {
+        mParameters.replaceValueFor(key, value);
+        return ALREADY_EXISTS;
+    }
+}
+
+status_t AudioParameter::addInt(const String8& key, const int value)
+{
+    char str[12];
+    if (snprintf(str, 12, "%d", value) > 0) {
+        String8 str8 = String8(str);
+        return add(key, str8);
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+status_t AudioParameter::addFloat(const String8& key, const float value)
+{
+    char str[23];
+    if (snprintf(str, 23, "%.10f", value) > 0) {
+        String8 str8 = String8(str);
+        return add(key, str8);
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+status_t AudioParameter::remove(const String8& key)
+{
+    if (mParameters.indexOfKey(key) >= 0) {
+        mParameters.removeItem(key);
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+status_t AudioParameter::get(const String8& key, String8& value)
+{
+    if (mParameters.indexOfKey(key) >= 0) {
+        value = mParameters.valueFor(key);
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+status_t AudioParameter::getInt(const String8& key, int& value)
+{
+    String8 str8;
+    status_t result = get(key, str8);
+    value = 0;
+    if (result == NO_ERROR) {
+        int val;
+        if (sscanf(str8.string(), "%d", &val) == 1) {
+            value = val;
+        } else {
+            result = INVALID_OPERATION;
+        }
+    }
+    return result;
+}
+
+status_t AudioParameter::getFloat(const String8& key, float& value)
+{
+    String8 str8;
+    status_t result = get(key, str8);
+    value = 0;
+    if (result == NO_ERROR) {
+        float val;
+        if (sscanf(str8.string(), "%f", &val) == 1) {
+            value = val;
+        } else {
+            result = INVALID_OPERATION;
+        }
+    }
+    return result;
+}
+
+status_t AudioParameter::getAt(size_t index, String8& key, String8& value)
+{
+    if (mParameters.size() > index) {
+        key = mParameters.keyAt(index);
+        value = mParameters.valueAt(index);
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+};  // namespace android
--- a/dom/camera/CameraControlImpl.cpp
+++ b/dom/camera/CameraControlImpl.cpp
@@ -191,19 +191,19 @@ CameraControlImpl::AutoFocus(nsICameraAu
 nsresult
 CameraControlImpl::TakePicture(CameraSize aSize, int32_t aRotation, const nsAString& aFileFormat, CameraPosition aPosition, nsICameraTakePictureCallback* onSuccess, nsICameraErrorCallback* onError)
 {
   nsCOMPtr<nsIRunnable> takePictureTask = new TakePictureTask(this, aSize, aRotation, aFileFormat, aPosition, onSuccess, onError);
   return mCameraThread->Dispatch(takePictureTask, NS_DISPATCH_NORMAL);
 }
 
 nsresult
-CameraControlImpl::StartRecording(CameraSize aSize, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError)
+CameraControlImpl::StartRecording(nsIDOMDeviceStorage* aStorageArea, const nsAString& aFilename, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError)
 {
-  nsCOMPtr<nsIRunnable> startRecordingTask = new StartRecordingTask(this, aSize, onSuccess, onError);
+  nsCOMPtr<nsIRunnable> startRecordingTask = new StartRecordingTask(this, aStorageArea, aFilename, onSuccess, onError);
   return mCameraThread->Dispatch(startRecordingTask, NS_DISPATCH_NORMAL);
 }
 
 nsresult
 CameraControlImpl::StopRecording()
 {
   nsCOMPtr<nsIRunnable> stopRecordingTask = new StopRecordingTask(this);
   return mCameraThread->Dispatch(stopRecordingTask, NS_DISPATCH_NORMAL);
@@ -218,16 +218,23 @@ CameraControlImpl::StartPreview(DOMCamer
 
 void
 CameraControlImpl::StopPreview()
 {
   nsCOMPtr<nsIRunnable> stopPreviewTask = new StopPreviewTask(this);
   mCameraThread->Dispatch(stopPreviewTask, NS_DISPATCH_NORMAL);
 }
 
+nsresult
+CameraControlImpl::GetPreviewStreamVideoMode(CameraRecordingOptions* aOptions, nsICameraPreviewStreamCallback* onSuccess, nsICameraErrorCallback* onError)
+{
+  nsCOMPtr<nsIRunnable> getPreviewStreamVideoModeTask = new GetPreviewStreamVideoModeTask(this, *aOptions, onSuccess, onError);
+  return mCameraThread->Dispatch(getPreviewStreamVideoModeTask, NS_DISPATCH_NORMAL);
+}
+
 bool
 CameraControlImpl::ReceiveFrame(void* aBuffer, ImageFormat aFormat, FrameBuilder aBuilder)
 {
   if (!mDOMPreview) {
     return false;
   }
 
   return mDOMPreview->ReceiveFrame(aBuffer, aFormat, aBuilder);
--- a/dom/camera/CameraControlImpl.h
+++ b/dom/camera/CameraControlImpl.h
@@ -3,16 +3,17 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef DOM_CAMERA_CAMERACONTROLIMPL_H
 #define DOM_CAMERA_CAMERACONTROLIMPL_H
 
 #include "nsCOMPtr.h"
 #include "nsDOMFile.h"
 #include "DictionaryHelpers.h"
+#include "nsIDOMDeviceStorage.h"
 #include "nsIDOMCameraManager.h"
 #include "ICameraControl.h"
 #include "CameraCommon.h"
 
 namespace mozilla {
 
 using namespace dom;
 
@@ -20,30 +21,32 @@ class GetPreviewStreamTask;
 class StartPreviewTask;
 class StopPreviewTask;
 class AutoFocusTask;
 class TakePictureTask;
 class StartRecordingTask;
 class StopRecordingTask;
 class SetParameterTask;
 class GetParameterTask;
+class GetPreviewStreamVideoModeTask;
 
 class DOMCameraPreview;
 
 class CameraControlImpl : public ICameraControl
 {
   friend class GetPreviewStreamTask;
   friend class StartPreviewTask;
   friend class StopPreviewTask;
   friend class AutoFocusTask;
   friend class TakePictureTask;
   friend class StartRecordingTask;
   friend class StopRecordingTask;
   friend class SetParameterTask;
   friend class GetParameterTask;
+  friend class GetPreviewStreamVideoModeTask;
 
 public:
   CameraControlImpl(uint32_t aCameraId, nsIThread* aCameraThread)
     : mCameraId(aCameraId)
     , mCameraThread(aCameraThread)
     , mFileFormat()
     , mMaxMeteringAreas(0)
     , mMaxFocusAreas(0)
@@ -59,18 +62,19 @@ public:
     DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
   }
 
   nsresult GetPreviewStream(CameraSize aSize, nsICameraPreviewStreamCallback* onSuccess, nsICameraErrorCallback* onError);
   nsresult StartPreview(DOMCameraPreview* aDOMPreview);
   void StopPreview();
   nsresult AutoFocus(nsICameraAutoFocusCallback* onSuccess, nsICameraErrorCallback* onError);
   nsresult TakePicture(CameraSize aSize, int32_t aRotation, const nsAString& aFileFormat, CameraPosition aPosition, nsICameraTakePictureCallback* onSuccess, nsICameraErrorCallback* onError);
-  nsresult StartRecording(CameraSize aSize, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError);
+  nsresult StartRecording(nsIDOMDeviceStorage* aStorageArea, const nsAString& aFilename, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError);
   nsresult StopRecording();
+  nsresult GetPreviewStreamVideoMode(CameraRecordingOptions* aOptions, nsICameraPreviewStreamCallback* onSuccess, nsICameraErrorCallback* onError);
 
   nsresult Set(uint32_t aKey, const nsAString& aValue);
   nsresult Get(uint32_t aKey, nsAString& aValue);
   nsresult Set(uint32_t aKey, double aValue);
   nsresult Get(uint32_t aKey, double* aValue);
   nsresult Set(JSContext* aCx, uint32_t aKey, const JS::Value& aValue, uint32_t aLimit);
   nsresult Get(JSContext* aCx, uint32_t aKey, JS::Value* aValue);
 
@@ -106,16 +110,17 @@ protected:
   virtual nsresult StartPreviewImpl(StartPreviewTask* aStartPreview) = 0;
   virtual nsresult StopPreviewImpl(StopPreviewTask* aStopPreview) = 0;
   virtual nsresult AutoFocusImpl(AutoFocusTask* aAutoFocus) = 0;
   virtual nsresult TakePictureImpl(TakePictureTask* aTakePicture) = 0;
   virtual nsresult StartRecordingImpl(StartRecordingTask* aStartRecording) = 0;
   virtual nsresult StopRecordingImpl(StopRecordingTask* aStopRecording) = 0;
   virtual nsresult PushParametersImpl() = 0;
   virtual nsresult PullParametersImpl() = 0;
+  virtual nsresult GetPreviewStreamVideoModeImpl(GetPreviewStreamVideoModeTask* aGetPreviewStreamVideoMode) = 0;
 
   uint32_t            mCameraId;
   nsCOMPtr<nsIThread> mCameraThread;
   nsString            mFileFormat;
   uint32_t            mMaxMeteringAreas;
   uint32_t            mMaxFocusAreas;
 
   /**
@@ -342,71 +347,74 @@ public:
   nsCOMPtr<nsICameraTakePictureCallback> mOnSuccessCb;
   nsCOMPtr<nsICameraErrorCallback> mOnErrorCb;
 };
 
 // Return the captured video to JS.  Runs on the main thread.
 class StartRecordingResult : public nsRunnable
 {
 public:
-  StartRecordingResult(nsIDOMMediaStream* aStream, nsICameraStartRecordingCallback* onSuccess)
-    : mStream(aStream)
-    , mOnSuccessCb(onSuccess)
+  StartRecordingResult(nsICameraStartRecordingCallback* onSuccess)
+    : mOnSuccessCb(onSuccess)
   { }
 
   virtual ~StartRecordingResult() { }
 
   NS_IMETHOD Run()
   {
     MOZ_ASSERT(NS_IsMainThread());
 
     if (mOnSuccessCb) {
-      mOnSuccessCb->HandleEvent(mStream);
+      mOnSuccessCb->HandleEvent();
     }
     return NS_OK;
   }
 
 protected:
-  nsCOMPtr<nsIDOMMediaStream> mStream;
   nsCOMPtr<nsICameraStartRecordingCallback> mOnSuccessCb;
 };
 
 // Start video recording.
 class StartRecordingTask : public nsRunnable
 {
 public:
-  StartRecordingTask(CameraControlImpl* aCameraControl, CameraSize aSize, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError)
-    : mSize(aSize)
-    , mCameraControl(aCameraControl)
+  StartRecordingTask(CameraControlImpl* aCameraControl, nsIDOMDeviceStorage* aStorageArea, const nsAString& aFilename, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError)
+    : mCameraControl(aCameraControl)
+    , mStorageArea(aStorageArea)
+    , mFilename(aFilename)
     , mOnSuccessCb(onSuccess)
     , mOnErrorCb(onError)
   {
     DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
   }
 
   virtual ~StartRecordingTask()
   {
     DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
   }
 
   NS_IMETHOD Run()
   {
     DOM_CAMERA_LOGT("%s:%d\n", __func__, __LINE__);
     nsresult rv = mCameraControl->StartRecordingImpl(this);
-    DOM_CAMERA_LOGT("%s:%d\n", __func__, __LINE__);
+    DOM_CAMERA_LOGT("%s:%d : result %d\n", __func__, __LINE__, rv);
 
-    if (NS_FAILED(rv) && mOnErrorCb) {
+    if (NS_SUCCEEDED(rv)) {
+      if (mOnSuccessCb) {
+        rv = NS_DispatchToMainThread(new StartRecordingResult(mOnSuccessCb));
+      }
+    } else if (mOnErrorCb) {
       rv = NS_DispatchToMainThread(new CameraErrorResult(mOnErrorCb, NS_LITERAL_STRING("FAILURE")));
-      NS_ENSURE_SUCCESS(rv, rv);
     }
     return rv;
   }
 
-  CameraSize mSize;
   nsRefPtr<CameraControlImpl> mCameraControl;
+  nsCOMPtr<nsIDOMDeviceStorage> mStorageArea;
+  nsString mFilename;
   nsCOMPtr<nsICameraStartRecordingCallback> mOnSuccessCb;
   nsCOMPtr<nsICameraErrorCallback> mOnErrorCb;
 };
 
 // Stop video recording.
 class StopRecordingTask : public nsRunnable
 {
 public:
@@ -486,11 +494,72 @@ public:
     DOM_CAMERA_LOGT("%s:%d\n", __func__, __LINE__);
 
     return NS_OK;
   }
 
   nsRefPtr<CameraControlImpl> mCameraControl;
 };
 
+// Return the resulting preview stream to JS.  Runs on the main thread.
+class GetPreviewStreamVideoModeResult : public nsRunnable
+{
+public:
+  GetPreviewStreamVideoModeResult(nsIDOMMediaStream* aStream, nsICameraPreviewStreamCallback* onSuccess)
+     : mStream(aStream)
+     , mOnSuccessCb(onSuccess)
+  {
+    DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
+  }
+
+  virtual ~GetPreviewStreamVideoModeResult()
+  {
+    DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
+  }
+
+  NS_IMETHOD Run()
+  {
+    MOZ_ASSERT(NS_IsMainThread());
+
+    if (mOnSuccessCb) {
+      mOnSuccessCb->HandleEvent(mStream);
+    }
+    return NS_OK;
+  }
+
+protected:
+  nsCOMPtr<nsIDOMMediaStream> mStream;
+  nsCOMPtr<nsICameraPreviewStreamCallback> mOnSuccessCb;
+};
+
+// Get the video mode preview stream.
+class GetPreviewStreamVideoModeTask : public nsRunnable
+{
+public:
+  GetPreviewStreamVideoModeTask(CameraControlImpl* aCameraControl, CameraRecordingOptions aOptions,  nsICameraPreviewStreamCallback* onSuccess, nsICameraErrorCallback* onError)
+    : mCameraControl(aCameraControl)
+    , mOptions(aOptions)
+    , mOnSuccessCb(onSuccess)
+    , mOnErrorCb(onError)
+  { }
+
+  NS_IMETHOD Run()
+  {
+    DOM_CAMERA_LOGI("%s:%d -- BEFORE IMPL\n", __func__, __LINE__);
+    nsresult rv = mCameraControl->GetPreviewStreamVideoModeImpl(this);
+    DOM_CAMERA_LOGI("%s:%d -- AFTER IMPL : rv = %d\n", __func__, __LINE__, rv);
+
+    if (NS_FAILED(rv) && mOnErrorCb) {
+      rv = NS_DispatchToMainThread(new CameraErrorResult(mOnErrorCb, NS_LITERAL_STRING("FAILURE")));
+      NS_ENSURE_SUCCESS(rv, rv);
+    }
+    return NS_OK;
+  }
+
+  nsRefPtr<CameraControlImpl> mCameraControl;
+  CameraRecordingOptions mOptions;
+  nsCOMPtr<nsICameraPreviewStreamCallback> mOnSuccessCb;
+  nsCOMPtr<nsICameraErrorCallback> mOnErrorCb;
+};
+
 } // namespace mozilla
 
 #endif // DOM_CAMERA_CAMERACONTROLIMPL_H
--- a/dom/camera/DOMCameraControl.cpp
+++ b/dom/camera/DOMCameraControl.cpp
@@ -4,16 +4,17 @@
 
 #include "base/basictypes.h"
 #include "nsCOMPtr.h"
 #include "nsDOMClassInfo.h"
 #include "jsapi.h"
 #include "nsThread.h"
 #include "mozilla/Services.h"
 #include "nsIObserverService.h"
+#include "nsIDOMDeviceStorage.h"
 #include "DOMCameraManager.h"
 #include "DOMCameraCapabilities.h"
 #include "DOMCameraControl.h"
 #include "CameraCommon.h"
 
 using namespace mozilla;
 using namespace dom;
 
@@ -213,37 +214,33 @@ nsDOMCameraControl::GetOnShutter(nsICame
 }
 NS_IMETHODIMP
 nsDOMCameraControl::SetOnShutter(nsICameraShutterCallback* aOnShutter)
 {
   // TODO: see bug 779138.
   return NS_ERROR_NOT_IMPLEMENTED;
 }
 
-/* void startRecording (in jsval aOptions, in nsICameraStartRecordingCallback onSuccess, [optional] in nsICameraErrorCallback onError); */
+/* [implicit_jscontext] void startRecording (in nsIDOMDeviceStorage storageArea, in DOMString filename, in nsICameraStartRecordingCallback onSuccess, [optional] in nsICameraErrorCallback onError); */
 NS_IMETHODIMP
-nsDOMCameraControl::StartRecording(const JS::Value& aOptions, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError, JSContext* cx)
+nsDOMCameraControl::StartRecording(nsIDOMDeviceStorage* storageArea, const nsAString& filename, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError, JSContext* cx)
 {
   NS_ENSURE_TRUE(onSuccess, NS_ERROR_INVALID_ARG);
 
-  CameraSize size;
-  nsresult rv = size.Init(cx, &aOptions);
-  NS_ENSURE_SUCCESS(rv, rv);
-
   nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
   if (!obs) {
     NS_WARNING("Could not get the Observer service for CameraControl::StartRecording.");
     return NS_ERROR_FAILURE;
   }
 
   obs->NotifyObservers(nullptr,
                        "recording-device-events",
                        NS_LITERAL_STRING("starting").get());
 
-  return mCameraControl->StartRecording(size, onSuccess, onError);
+  return mCameraControl->StartRecording(storageArea, filename, onSuccess, onError);
 }
 
 /* void stopRecording (); */
 NS_IMETHODIMP
 nsDOMCameraControl::StopRecording()
 {
   nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
   if (!obs) {
@@ -311,16 +308,29 @@ nsDOMCameraControl::TakePicture(const JS
   pos.altitude = NAN;
   pos.timestamp = NAN;
   rv = pos.Init(cx, &options.position);
   NS_ENSURE_SUCCESS(rv, rv);
 
   return mCameraControl->TakePicture(size, options.rotation, options.fileFormat, pos, onSuccess, onError);
 }
 
+/* [implicit_jscontext] void GetPreviewStreamVideoMode (in jsval aOptions, in nsICameraPreviewStreamCallback onSuccess, [optional] in nsICameraErrorCallback onError); */
+NS_IMETHODIMP
+nsDOMCameraControl::GetPreviewStreamVideoMode(const JS::Value& aOptions, nsICameraPreviewStreamCallback* onSuccess, nsICameraErrorCallback* onError, JSContext* cx)
+{
+  NS_ENSURE_TRUE(onSuccess, NS_ERROR_INVALID_ARG);
+
+  CameraRecordingOptions options;
+  nsresult rv = options.Init(cx, &aOptions);
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  return mCameraControl->GetPreviewStreamVideoMode(&options, onSuccess, onError);
+}
+
 class GetCameraResult : public nsRunnable
 {
 public:
   GetCameraResult(nsDOMCameraControl* aDOMCameraControl, nsresult aResult, nsICameraGetCameraCallback* onSuccess, nsICameraErrorCallback* onError)
     : mDOMCameraControl(aDOMCameraControl)
     , mResult(aResult)
     , mOnSuccessCb(onSuccess)
     , mOnErrorCb(onError)
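
For context, the nsIDOMCameraManager.idl changes reflected above replace the old options-based startRecording() with a (storageArea, filename) pair and add getPreviewStreamVideoMode(). Below is a minimal, illustrative JS sketch (not part of this changeset) of how a camera app might drive the new API; how the camera control object and the "videos" device-storage area are obtained is outside this patch and assumed here, and the option names follow the CameraRecordingOptions dictionary (rotation/width/height) consumed by GetPreviewStreamVideoModeImpl().

// Illustrative only -- not part of this changeset.  Assumes `camera` is an
// nsIDOMCameraControl obtained elsewhere and that the Device Storage API
// exposes a "videos" area.
var storage = navigator.getDeviceStorage("videos");   // assumed storage setup

// Switch the preview into video (recording) mode first.
camera.getPreviewStreamVideoMode(
  { rotation: 90, width: 352, height: 288 },          // CameraRecordingOptions
  function onPreviewReady(stream) {
    // Hand the recording-mode preview stream to a <video> element.
    previewVideo.mozSrcObject = stream;
    previewVideo.play();

    // Start recording into the storage area; for now the app must supply
    // the file extension itself (see bug 795202).
    camera.startRecording(storage, "clip-001.3gp",
      function onStarted() { console.log("recording started"); },
      function onError(err) { console.error("startRecording: " + err); });
  },
  function onError(err) { console.error("getPreviewStreamVideoMode: " + err); });

// Later, when the user stops capture:
camera.stopRecording();
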
--- a/dom/camera/GonkCameraControl.cpp
+++ b/dom/camera/GonkCameraControl.cpp
@@ -10,24 +10,30 @@
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
 #include <string.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <libgen.h>
 #include "base/basictypes.h"
 #include "libcameraservice/CameraHardwareInterface.h"
 #include "camera/CameraParameters.h"
 #include "nsCOMPtr.h"
 #include "nsDOMClassInfo.h"
 #include "nsMemory.h"
 #include "jsapi.h"
 #include "nsThread.h"
+#include <media/MediaProfiles.h>
+#include "nsDirectoryServiceDefs.h" // for NS_GetSpecialDirectory
 #include "nsPrintfCString.h"
 #include "DOMCameraManager.h"
 #include "GonkCameraHwMgr.h"
 #include "DOMCameraCapabilities.h"
 #include "DOMCameraControl.h"
 #include "GonkCameraControl.h"
 #include "CameraCommon.h"
 
@@ -542,30 +548,38 @@ nsGonkCameraControl::StartPreviewImpl(St
 
   if (aStartPreview->mDOMPreview) {
     mDOMPreview->Started();
   }
   return NS_OK;
 }
 
 nsresult
-nsGonkCameraControl::StopPreviewImpl(StopPreviewTask* aStopPreview)
+nsGonkCameraControl::StopPreviewInternal(bool aForced)
 {
   DOM_CAMERA_LOGI("%s: stopping preview\n", __func__);
 
   // StopPreview() is a synchronous call--it doesn't return
   // until the camera preview thread exits.
-  GonkCameraHardware::StopPreview(mHwHandle);
-  mDOMPreview->Stopped();
-  mDOMPreview = nullptr;
+  if (mDOMPreview) {
+    GonkCameraHardware::StopPreview(mHwHandle);
+    mDOMPreview->Stopped(aForced);
+    mDOMPreview = nullptr;
+  }
 
   return NS_OK;
 }
 
 nsresult
+nsGonkCameraControl::StopPreviewImpl(StopPreviewTask* aStopPreview)
+{
+  return StopPreviewInternal();
+}
+
+nsresult
 nsGonkCameraControl::AutoFocusImpl(AutoFocusTask* aAutoFocus)
 {
   nsCOMPtr<nsICameraAutoFocusCallback> cb = mAutoFocusOnSuccessCb;
   if (cb) {
     /**
      * We already have a callback, so someone has already
      * called autoFocus() -- cancel it.
      */
@@ -686,23 +700,81 @@ nsGonkCameraControl::PullParametersImpl(
   RwAutoLockWrite lock(mRwLock);
   GonkCameraHardware::PullParameters(mHwHandle, mParams);
   return NS_OK;
 }
 
 nsresult
 nsGonkCameraControl::StartRecordingImpl(StartRecordingTask* aStartRecording)
 {
-  return NS_ERROR_NOT_IMPLEMENTED;
+  mStartRecordingOnSuccessCb = aStartRecording->mOnSuccessCb;
+  mStartRecordingOnErrorCb = aStartRecording->mOnErrorCb;
+
+  /**
+   * We need to pull in the base path from aStartRecording->mStorageArea
+   * once that feature lands.  See bug 795201.
+   *
+   * For now, we just assume /sdcard/Movies.
+   *
+   * Also, the camera app needs to provide the file extension '.3gp' for now.
+   * See bug 795202.
+   */
+#if 1
+  nsCOMPtr<nsIFile> filename;
+  aStartRecording->mStorageArea->GetRootDirectory(getter_AddRefs(filename));
+  filename->Append(aStartRecording->mFilename);
+
+  nsAutoCString pathname;
+  filename->GetNativePath(pathname);
+#else
+  nsAutoCString pathname(NS_LITERAL_CSTRING("/sdcard/Movies/"));
+  nsAutoCString filename(NS_ConvertUTF16toUTF8(aStartRecording->mFilename));
+
+  // Make sure that the file name doesn't contain any directory components.
+  if (strcmp(filename.get(), basename(filename.get())) != 0) {
+    DOM_CAMERA_LOGE("Video filename '%s' is not valid\n", filename.get());
+    return NS_ERROR_INVALID_ARG;
+  }
+
+  pathname.Append(filename);
+#endif
+  DOM_CAMERA_LOGI("Video pathname is '%s'\n", pathname.get());
+  int fd = open(pathname.get(), O_RDWR | O_CREAT, 0644);
+  if (fd < 0) {
+    DOM_CAMERA_LOGE("Couldn't create file '%s' with error (%d) %s\n", pathname.get(), errno, strerror(errno));
+    return NS_ERROR_FAILURE;
+  }
+
+  if (SetupRecording(fd) != NS_OK) {
+    DOM_CAMERA_LOGE("SetupRecording() failed\n");
+    close(fd);
+    return NS_ERROR_FAILURE;
+  }
+  if (mRecorder->start() != OK) {
+    DOM_CAMERA_LOGE("mRecorder->start() failed\n");
+    close(fd);
+    return NS_ERROR_FAILURE;
+  }
+
+  // dispatch the callback
+  nsCOMPtr<nsIRunnable> startRecordingResult = new StartRecordingResult(mStartRecordingOnSuccessCb);
+  nsresult rv = NS_DispatchToMainThread(startRecordingResult);
+  if (NS_FAILED(rv)) {
+    DOM_CAMERA_LOGE("Failed to dispatch start recording result to main thread (%d)!", rv);
+  }
+  return NS_OK;
 }
 
 nsresult
 nsGonkCameraControl::StopRecordingImpl(StopRecordingTask* aStopRecording)
 {
-  return NS_ERROR_NOT_IMPLEMENTED;
+  mRecorder->stop();
+  delete mRecorder;
+  mRecorder = nullptr;
+  return NS_OK;
 }
 
 void
 nsGonkCameraControl::AutoFocusComplete(bool aSuccess)
 {
   /**
    * Auto focusing can change some of the camera's parameters, so
    * we need to pull a new set before sending the result to the
@@ -804,16 +876,162 @@ nsGonkCameraControl::SetPreviewSize(uint
   }
 
   mWidth = bestWidth;
   mHeight = bestHeight;
   mParams.setPreviewSize(mWidth, mHeight);
   PushParameters();
 }
 
+nsresult
+nsGonkCameraControl::SetupVideoMode()
+{
+  // read preferences for camcorder
+  mMediaProfiles = MediaProfiles::getInstance();
+
+  /**
+   * Right now default to profile 3, which is 352x288 on Otoro.  In the
+   * future, allow the application to select a recording quality and
+   * configuration.
+   *
+   * See bug 795379.
+   */
+  int quality = 3;  // cif:352x288
+  camcorder_quality q = static_cast<camcorder_quality>(quality);
+  mDuration         = mMediaProfiles->getCamcorderProfileParamByName("duration",    (int)mCameraId, q);
+  mVideoFileFormat  = mMediaProfiles->getCamcorderProfileParamByName("file.format", (int)mCameraId, q);
+  mVideoCodec       = mMediaProfiles->getCamcorderProfileParamByName("vid.codec",   (int)mCameraId, q);
+  mVideoBitRate     = mMediaProfiles->getCamcorderProfileParamByName("vid.bps",     (int)mCameraId, q);
+  mVideoFrameRate   = mMediaProfiles->getCamcorderProfileParamByName("vid.fps",     (int)mCameraId, q);
+  mVideoFrameWidth  = mMediaProfiles->getCamcorderProfileParamByName("vid.width",   (int)mCameraId, q);
+  mVideoFrameHeight = mMediaProfiles->getCamcorderProfileParamByName("vid.height",  (int)mCameraId, q);
+  mAudioCodec       = mMediaProfiles->getCamcorderProfileParamByName("aud.codec",   (int)mCameraId, q);
+  mAudioBitRate     = mMediaProfiles->getCamcorderProfileParamByName("aud.bps",     (int)mCameraId, q);
+  mAudioSampleRate  = mMediaProfiles->getCamcorderProfileParamByName("aud.hz",      (int)mCameraId, q);
+  mAudioChannels    = mMediaProfiles->getCamcorderProfileParamByName("aud.ch",      (int)mCameraId, q);
+
+  if (mVideoFrameRate == -1) {
+    DOM_CAMERA_LOGE("Failed to get a valid frame rate!\n");
+    DOM_CAMERA_LOGE("Also got width=%d, height=%d\n", mVideoFrameWidth, mVideoFrameHeight);
+    return NS_ERROR_FAILURE;
+  }
+
+  PullParametersImpl();
+
+  // Configure camera video recording parameters.
+  const size_t SIZE = 256;
+  char buffer[SIZE];
+
+  /**
+   * Ignore the width and height settings from app, just use the one in profile.
+   * Eventually, will try to choose a profile which respects the settings from app.
+   * See bug 795330.
+   */
+  mParams.setPreviewSize(mVideoFrameWidth, mVideoFrameHeight);
+  mParams.setPreviewFrameRate(mVideoFrameRate);
+  snprintf(buffer, SIZE, "%dx%d", mVideoFrameWidth, mVideoFrameHeight);
+
+  /**
+   * "record-size" is probably deprecated in later ICS;
+   * might need to set "video-size" instead of "record-size".
+   * See bug 795332.
+   */
+  mParams.set("record-size", buffer);
+
+  /**
+   * If we want to enable picture-taking _while_ recording video, this sets the
+   * size of the captured picture.  For now, just set it to the same dimensions
+   * as the video we're recording; ideally, we should probably make sure it
+   * matches one of the supported picture sizes.
+   */
+  mParams.setPictureSize(mVideoFrameWidth, mVideoFrameHeight);
+
+  PushParametersImpl();
+  return NS_OK;
+}
+
+#ifndef CHECK_SETARG
+#define CHECK_SETARG(x)                 \
+  do {                                  \
+    if (x) {                            \
+      DOM_CAMERA_LOGE(#x " failed\n");  \
+      return NS_ERROR_INVALID_ARG;      \
+    }                                   \
+  } while(0)
+#endif
+
+nsresult
+nsGonkCameraControl::SetupRecording(int aFd)
+{
+  // choosing a size big enough to hold the params
+  const size_t SIZE = 256;
+  char buffer[SIZE];
+
+  mRecorder = new GonkRecorder();
+  CHECK_SETARG(mRecorder->init());
+
+  // set all the params
+  CHECK_SETARG(mRecorder->setCameraHandle((int32_t)mHwHandle));
+  CHECK_SETARG(mRecorder->setAudioSource(AUDIO_SOURCE_CAMCORDER));
+  CHECK_SETARG(mRecorder->setVideoSource(VIDEO_SOURCE_CAMERA));
+  CHECK_SETARG(mRecorder->setOutputFormat((output_format)mVideoFileFormat));
+  CHECK_SETARG(mRecorder->setVideoFrameRate(mVideoFrameRate));
+  CHECK_SETARG(mRecorder->setVideoSize(mVideoFrameWidth, mVideoFrameHeight));
+  snprintf(buffer, SIZE, "video-param-encoding-bitrate=%d", mVideoBitRate);
+  CHECK_SETARG(mRecorder->setParameters(String8(buffer)));
+  CHECK_SETARG(mRecorder->setVideoEncoder((video_encoder)mVideoCodec));
+  snprintf(buffer, SIZE, "audio-param-encoding-bitrate=%d", mAudioBitRate);
+  CHECK_SETARG(mRecorder->setParameters(String8(buffer)));
+  snprintf(buffer, SIZE, "audio-param-number-of-channels=%d", mAudioChannels);
+  CHECK_SETARG(mRecorder->setParameters(String8(buffer)));
+  snprintf(buffer, SIZE, "audio-param-sampling-rate=%d", mAudioSampleRate);
+  CHECK_SETARG(mRecorder->setParameters(String8(buffer)));
+  CHECK_SETARG(mRecorder->setAudioEncoder((audio_encoder)mAudioCodec));
+  // TODO: For now there is no limit on recording duration (See bug 795090)
+  CHECK_SETARG(mRecorder->setParameters(String8("max-duration=-1")));
+  // TODO: For now there is no limit on file size (See bug 795090)
+  CHECK_SETARG(mRecorder->setParameters(String8("max-filesize=-1")));
+  snprintf(buffer, SIZE, "video-param-rotation-angle-degrees=%d", mVideoRotation);
+  CHECK_SETARG(mRecorder->setParameters(String8(buffer)));
+
+  // recording API needs file descriptor of output file
+  CHECK_SETARG(mRecorder->setOutputFile(aFd, 0, 0));
+  CHECK_SETARG(mRecorder->prepare());
+  return NS_OK;
+}
+
+nsresult
+nsGonkCameraControl::GetPreviewStreamVideoModeImpl(GetPreviewStreamVideoModeTask* aGetPreviewStreamVideoMode)
+{
+  nsCOMPtr<GetPreviewStreamResult> getPreviewStreamResult = nullptr;
+
+  // stop any currently running preview
+  StopPreviewInternal(true /* forced */);
+
+  // copy the recording preview options
+  mVideoRotation = aGetPreviewStreamVideoMode->mOptions.rotation;
+  mVideoWidth = aGetPreviewStreamVideoMode->mOptions.width;
+  mVideoHeight = aGetPreviewStreamVideoMode->mOptions.height;
+  DOM_CAMERA_LOGI("recording preview format: %d x %d (w x h) (rotated %d degrees)\n", mVideoWidth, mVideoHeight, mVideoRotation);
+
+  // setup the video mode
+  nsresult rv = SetupVideoMode();
+  NS_ENSURE_SUCCESS(rv, rv);
+
+  // create and return new preview stream object
+  getPreviewStreamResult = new GetPreviewStreamResult(this, mVideoWidth, mVideoHeight, mVideoFrameRate, aGetPreviewStreamVideoMode->mOnSuccessCb);
+  rv = NS_DispatchToMainThread(getPreviewStreamResult);
+  if (NS_FAILED(rv)) {
+    NS_WARNING("Failed to dispatch GetPreviewStreamVideoMode() onSuccess callback to main thread!");
+    return rv;
+  }
+
+  return NS_OK;
+}
+
 // Gonk callback handlers.
 namespace mozilla {
 
 void
 ReceiveImage(nsGonkCameraControl* gc, uint8_t* aData, uint32_t aLength)
 {
   gc->TakePictureComplete(aData, aLength);
 }
--- a/dom/camera/GonkCameraControl.h
+++ b/dom/camera/GonkCameraControl.h
@@ -19,16 +19,17 @@
 
 #include "base/basictypes.h"
 #include "prtypes.h"
 #include "prrwlock.h"
 #include "nsIDOMCameraManager.h"
 #include "DOMCameraControl.h"
 #include "CameraControlImpl.h"
 #include "CameraCommon.h"
+#include "GonkRecorder.h"
 
 namespace mozilla {
 
 namespace layers {
 class GraphicBufferLocked;
 }
 
 class nsGonkCameraControl : public CameraControlImpl
@@ -42,31 +43,36 @@ public:
   double GetParameterDouble(uint32_t aKey);
   void GetParameter(uint32_t aKey, nsTArray<dom::CameraRegion>& aRegions);
   void SetParameter(const char* aKey, const char* aValue);
   void SetParameter(uint32_t aKey, const char* aValue);
   void SetParameter(uint32_t aKey, double aValue);
   void SetParameter(uint32_t aKey, const nsTArray<dom::CameraRegion>& aRegions);
   nsresult PushParameters();
 
+  nsresult SetupRecording(int aFd);
+  nsresult SetupVideoMode();
+
   void AutoFocusComplete(bool aSuccess);
   void TakePictureComplete(uint8_t* aData, uint32_t aLength);
 
 protected:
   ~nsGonkCameraControl();
 
   nsresult GetPreviewStreamImpl(GetPreviewStreamTask* aGetPreviewStream);
   nsresult StartPreviewImpl(StartPreviewTask* aStartPreview);
   nsresult StopPreviewImpl(StopPreviewTask* aStopPreview);
+  nsresult StopPreviewInternal(bool aForced = false);
   nsresult AutoFocusImpl(AutoFocusTask* aAutoFocus);
   nsresult TakePictureImpl(TakePictureTask* aTakePicture);
   nsresult StartRecordingImpl(StartRecordingTask* aStartRecording);
   nsresult StopRecordingImpl(StopRecordingTask* aStopRecording);
   nsresult PushParametersImpl();
   nsresult PullParametersImpl();
+  nsresult GetPreviewStreamVideoModeImpl(GetPreviewStreamVideoModeTask* aGetPreviewStreamVideoMode);
 
   void SetPreviewSize(uint32_t aWidth, uint32_t aHeight);
 
   uint32_t                  mHwHandle;
   double                    mExposureCompensationMin;
   double                    mExposureCompensationStep;
   bool                      mDeferConfigUpdate;
   PRRWLock*                 mRwLock;
@@ -79,16 +85,37 @@ protected:
     PREVIEW_FORMAT_YUV420P,
     PREVIEW_FORMAT_YUV420SP
   };
   uint32_t                  mFormat;
 
   uint32_t                  mFps;
   uint32_t                  mDiscardedFrameCount;
 
+  android::MediaProfiles*   mMediaProfiles;
+  android::GonkRecorder*    mRecorder;
+
+  PRUint32                  mVideoRotation;
+  PRUint32                  mVideoWidth;
+  PRUint32                  mVideoHeight;
+  nsString                  mVideoFile;
+
+  // camcorder profile settings for the desired quality level
+  int mDuration;        // max recording duration (ignored)
+  int mVideoFileFormat; // output file format
+  int mVideoCodec;      // video encoder
+  int mVideoBitRate;    // video bit rate
+  int mVideoFrameRate;  // video frame rate
+  int mVideoFrameWidth; // video frame width
+  int mVideoFrameHeight;// video frame height
+  int mAudioCodec;      // audio encoder
+  int mAudioBitRate;    // audio bit rate
+  int mAudioSampleRate; // audio sample rate
+  int mAudioChannels;   // number of audio channels
+
 private:
   nsGonkCameraControl(const nsGonkCameraControl&) MOZ_DELETE;
   nsGonkCameraControl& operator=(const nsGonkCameraControl&) MOZ_DELETE;
 };
 
 // camera driver callbacks
 void ReceiveImage(nsGonkCameraControl* gc, uint8_t* aData, uint32_t aLength);
 void AutoFocusComplete(nsGonkCameraControl* gc, bool aSuccess);
--- a/dom/camera/GonkCameraHwMgr.cpp
+++ b/dom/camera/GonkCameraHwMgr.cpp
@@ -139,16 +139,44 @@ GonkCameraHardware::NotifyCallback(int32
 
     default:
       DOM_CAMERA_LOGE("Unhandled notify callback event %d\n", aMsgType);
       break;
   }
 }
 
 void
+GonkCameraHardware::DataCallbackTimestamp(nsecs_t aTimestamp, int32_t aMsgType, const sp<IMemory> &aDataPtr, void* aUser)
+{
+  DOM_CAMERA_LOGI("%s",__func__);
+  GonkCameraHardware* hw = GetHardware((uint32_t)aUser);
+  if (!hw) {
+    DOM_CAMERA_LOGE("%s:aUser = %d resolved to no camera hw\n", __func__, (uint32_t)aUser);
+    return;
+  }
+  if (hw->mClosing) {
+    return;
+  }
+
+  sp<GonkCameraListener> listener;
+  {
+    //TODO
+    //Mutex::Autolock _l(hw->mLock);
+    listener = hw->mListener;
+  }
+  if (listener.get()) {
+    DOM_CAMERA_LOGI("Listener registered, posting recording frame!");
+    listener->postDataTimestamp(aTimestamp, aMsgType, aDataPtr);
+  } else {
+    DOM_CAMERA_LOGW("No listener was set. Drop a recording frame.");
+    hw->mHardware->releaseRecordingFrame(aDataPtr);
+  }
+}
+
+void
 GonkCameraHardware::Init()
 {
   DOM_CAMERA_LOGT("%s: this=%p\n", __func__, (void* )this);
 
   if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, (const hw_module_t**)&mModule) < 0) {
     return;
   }
   char cameraDeviceName[4];
@@ -157,17 +185,17 @@ GonkCameraHardware::Init()
   if (mHardware->initialize(&mModule->common) != OK) {
     mHardware.clear();
     return;
   }
 
   if (sHwHandle == 0) {
     sHwHandle = 1;  // don't use 0
   }
-  mHardware->setCallbacks(GonkCameraHardware::NotifyCallback, GonkCameraHardware::DataCallback, NULL, (void*)sHwHandle);
+  mHardware->setCallbacks(GonkCameraHardware::NotifyCallback, GonkCameraHardware::DataCallback, GonkCameraHardware::DataCallbackTimestamp, (void*)sHwHandle);
   mInitialized = true;
 }
 
 GonkCameraHardware::~GonkCameraHardware()
 {
   DOM_CAMERA_LOGT( "%s:%d : this=%p\n", __func__, __LINE__, (void*)this );
   sHw = nullptr;
 }
@@ -309,8 +337,86 @@ GonkCameraHardware::StartPreview(uint32_
 void
 GonkCameraHardware::StopPreview(uint32_t aHwHandle)
 {
   GonkCameraHardware* hw = GetHardware(aHwHandle);
   if (hw) {
     hw->mHardware->stopPreview();
   }
 }
+
+int
+GonkCameraHardware::StartRecording(uint32_t aHwHandle)
+{
+  DOM_CAMERA_LOGI("%s: aHwHandle = %d\n", __func__, aHwHandle);
+  int rv = OK;
+  GonkCameraHardware* hw = GetHardware(aHwHandle);
+  if (!hw) {
+    return DEAD_OBJECT;
+  }
+
+  if (hw->mHardware->recordingEnabled()) {
+    return OK;
+  }
+
+  if (!hw->mHardware->previewEnabled()) {
+    DOM_CAMERA_LOGW("Preview was not enabled, enabling now!\n");
+    rv = StartPreview(aHwHandle);
+    if (rv != OK) {
+      return rv;
+    }
+  }
+
+  // start recording mode
+  hw->mHardware->enableMsgType(CAMERA_MSG_VIDEO_FRAME);
+  DOM_CAMERA_LOGI("Calling hw->startRecording\n");
+  rv = hw->mHardware->startRecording();
+  if (rv != OK) {
+    DOM_CAMERA_LOGE("mHardware->startRecording() failed with status %d", rv);
+  }
+  return rv;
+}
+
+int
+GonkCameraHardware::StopRecording(uint32_t aHwHandle)
+{
+  DOM_CAMERA_LOGI("%s: aHwHandle = %d\n", __func__, aHwHandle);
+  GonkCameraHardware* hw = GetHardware(aHwHandle);
+  if (!hw) {
+    return DEAD_OBJECT;
+  }
+
+  hw->mHardware->disableMsgType(CAMERA_MSG_VIDEO_FRAME);
+  hw->mHardware->stopRecording();
+  return OK;
+}
+
+int
+GonkCameraHardware::SetListener(uint32_t aHwHandle, const sp<GonkCameraListener>& aListener)
+{
+  GonkCameraHardware* hw = GetHardware(aHwHandle);
+  if (!hw) {
+    return DEAD_OBJECT;
+  }
+
+  hw->mListener = aListener;
+  return OK;
+}
+
+void
+GonkCameraHardware::ReleaseRecordingFrame(uint32_t aHwHandle, const sp<IMemory>& aFrame)
+{
+  GonkCameraHardware* hw = GetHardware(aHwHandle);
+  if (hw) {
+    hw->mHardware->releaseRecordingFrame(aFrame);
+  }
+}
+
+int
+GonkCameraHardware::StoreMetaDataInBuffers(uint32_t aHwHandle, bool aEnabled)
+{
+  GonkCameraHardware* hw = GetHardware(aHwHandle);
+  if (!hw) {
+    return DEAD_OBJECT;
+  }
+
+  return hw->mHardware->storeMetaDataInBuffers(aEnabled);
+}
--- a/dom/camera/GonkCameraHwMgr.h
+++ b/dom/camera/GonkCameraHwMgr.h
@@ -15,16 +15,18 @@
  */
 
 #ifndef DOM_CAMERA_GONKCAMERAHWMGR_H
 #define DOM_CAMERA_GONKCAMERAHWMGR_H
 
 #include "libcameraservice/CameraHardwareInterface.h"
 #include "binder/IMemory.h"
 #include "mozilla/ReentrantMonitor.h"
+#include "GonkCameraListener.h"
+#include <utils/threads.h>
 
 #include "GonkCameraControl.h"
 #include "CameraCommon.h"
 
 #include "GonkNativeWindow.h"
 
 // config
 #define GIHM_TIMING_RECEIVEFRAME    0
@@ -41,30 +43,36 @@ class GonkCameraHardware : GonkNativeWin
 {
 protected:
   GonkCameraHardware(GonkCamera* aTarget, uint32_t aCamera);
   ~GonkCameraHardware();
   void Init();
 
   static void     DataCallback(int32_t aMsgType, const sp<IMemory> &aDataPtr, camera_frame_metadata_t* aMetadata, void* aUser);
   static void     NotifyCallback(int32_t aMsgType, int32_t ext1, int32_t ext2, void* aUser);
+  static void     DataCallbackTimestamp(nsecs_t aTimestamp, int32_t aMsgType, const sp<IMemory>& aDataPtr, void* aUser);
 
 public:
   virtual void    OnNewFrame() MOZ_OVERRIDE;
 
   static void     ReleaseHandle(uint32_t aHwHandle);
   static uint32_t GetHandle(GonkCamera* aTarget, uint32_t aCamera);
   static int      AutoFocus(uint32_t aHwHandle);
   static void     CancelAutoFocus(uint32_t aHwHandle);
   static int      TakePicture(uint32_t aHwHandle);
   static void     CancelTakePicture(uint32_t aHwHandle);
   static int      StartPreview(uint32_t aHwHandle);
   static void     StopPreview(uint32_t aHwHandle);
   static int      PushParameters(uint32_t aHwHandle, const CameraParameters& aParams);
   static void     PullParameters(uint32_t aHwHandle, CameraParameters& aParams);
+  static int      StartRecording(uint32_t aHwHandle);
+  static int      StopRecording(uint32_t aHwHandle);
+  static int      SetListener(uint32_t aHwHandle, const sp<GonkCameraListener>& aListener);
+  static void     ReleaseRecordingFrame(uint32_t aHwHandle, const sp<IMemory>& aFrame);
+  static int      StoreMetaDataInBuffers(uint32_t aHwHandle, bool aEnabled);
 
 protected:
   static GonkCameraHardware*    sHw;
   static uint32_t               sHwHandle;
 
   static GonkCameraHardware*    GetHardware(uint32_t aHwHandle)
   {
     if (aHwHandle == sHwHandle) {
@@ -88,16 +96,17 @@ protected:
   sp<CameraHardwareInterface>   mHardware;
   GonkCamera*                   mTarget;
   camera_module_t*              mModule;
   sp<ANativeWindow>             mWindow;
 #if GIHM_TIMING_OVERALL
   struct timespec               mStart;
   struct timespec               mAutoFocusStart;
 #endif
+  sp<GonkCameraListener>        mListener;
   bool                          mInitialized;
 
   bool IsInitialized()
   {
     return mInitialized;
   }
 
 private:
new file mode 100644
--- /dev/null
+++ b/dom/camera/GonkCameraListener.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GONK_CAMERA_LISTENER_H
+#define GONK_CAMERA_LISTENER_H
+
+#include <utils/Timers.h>
+#include "libcameraservice/CameraHardwareInterface.h"
+
+namespace android {
+
+// ref-counted object for callbacks
+class GonkCameraListener: virtual public RefBase
+{
+public:
+    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0;
+    virtual void postData(int32_t msgType, const sp<IMemory>& dataPtr,
+                          camera_frame_metadata_t *metadata) = 0;
+    virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) = 0;
+};
+
+}; // namespace android
+
+#endif
new file mode 100644
--- /dev/null
+++ b/dom/camera/GonkCameraSource.cpp
@@ -0,0 +1,733 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <base/basictypes.h>
+#include "nsDebug.h"
+#define DOM_CAMERA_LOG_LEVEL        3
+#include "CameraCommon.h"
+#define LOGD DOM_CAMERA_LOGA
+#define LOGV DOM_CAMERA_LOGI
+#define LOGI DOM_CAMERA_LOGI
+#define LOGW DOM_CAMERA_LOGW
+#define LOGE DOM_CAMERA_LOGE
+
+#include <OMX_Component.h>
+#include "GonkCameraSource.h"
+#include "GonkCameraListener.h"
+#include "GonkCameraHwMgr.h"
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <utils/String8.h>
+#include <cutils/properties.h>
+
+using namespace mozilla;
+namespace android {
+
+static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
+
+struct GonkCameraSourceListener : public GonkCameraListener {
+    GonkCameraSourceListener(const sp<GonkCameraSource> &source);
+
+    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
+    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
+                          camera_frame_metadata_t *metadata);
+
+    virtual void postDataTimestamp(
+            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
+
+protected:
+    virtual ~GonkCameraSourceListener();
+
+private:
+    wp<GonkCameraSource> mSource;
+
+    GonkCameraSourceListener(const GonkCameraSourceListener &);
+    GonkCameraSourceListener &operator=(const GonkCameraSourceListener &);
+};
+
+GonkCameraSourceListener::GonkCameraSourceListener(const sp<GonkCameraSource> &source)
+    : mSource(source) {
+}
+
+GonkCameraSourceListener::~GonkCameraSourceListener() {
+}
+
+void GonkCameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
+    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
+}
+
+void GonkCameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
+                                    camera_frame_metadata_t *metadata) {
+    LOGV("postData(%d, ptr:%p, size:%d)",
+         msgType, dataPtr->pointer(), dataPtr->size());
+
+    sp<GonkCameraSource> source = mSource.promote();
+    if (source.get() != NULL) {
+        source->dataCallback(msgType, dataPtr);
+    }
+}
+
+void GonkCameraSourceListener::postDataTimestamp(
+        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
+
+    sp<GonkCameraSource> source = mSource.promote();
+    if (source.get() != NULL) {
+        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
+    }
+}
+
+static int32_t getColorFormat(const char* colorFormat) {
+    return OMX_COLOR_FormatYUV420SemiPlanar;
+
+    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+       return OMX_COLOR_FormatYUV420Planar;
+    }
+
+    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
+       return OMX_COLOR_FormatYUV422SemiPlanar;
+    }
+
+    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
+        return OMX_COLOR_FormatYUV420SemiPlanar;
+    }
+
+    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
+        return OMX_COLOR_FormatYCbYCr;
+    }
+
+    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
+       return OMX_COLOR_Format16bitRGB565;
+    }
+
+    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
+       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
+    }
+
+    LOGE("Uknown color format (%s), please add it to "
+         "GonkCameraSource::getColorFormat", colorFormat);
+
+    CHECK_EQ(0, "Unknown color format");
+}
+
+GonkCameraSource *GonkCameraSource::Create(
+    int32_t cameraHandle,
+    Size videoSize,
+    int32_t frameRate,
+    bool storeMetaDataInVideoBuffers) {
+
+    GonkCameraSource *source = new GonkCameraSource(cameraHandle,
+                    videoSize, frameRate,
+                    storeMetaDataInVideoBuffers);
+    return source;
+}
+
+GonkCameraSource::GonkCameraSource(
+    int32_t cameraHandle,
+    Size videoSize,
+    int32_t frameRate,
+    bool storeMetaDataInVideoBuffers)
+    : mCameraFlags(0),
+      mVideoFrameRate(-1),
+      mNumFramesReceived(0),
+      mLastFrameTimestampUs(0),
+      mStarted(false),
+      mNumFramesEncoded(0),
+      mTimeBetweenFrameCaptureUs(0),
+      mFirstFrameTimeUs(0),
+      mNumFramesDropped(0),
+      mNumGlitches(0),
+      mGlitchDurationThresholdUs(200000),
+      mCollectStats(false) {
+    mVideoSize.width  = -1;
+    mVideoSize.height = -1;
+
+    mCameraHandle = cameraHandle;
+
+    mInitCheck = init(
+                    videoSize, frameRate,
+                    storeMetaDataInVideoBuffers);
+    if (mInitCheck != OK) releaseCamera();
+}
+
+status_t GonkCameraSource::initCheck() const {
+    return mInitCheck;
+}
+
+//TODO: Do we need to reimplement isCameraAvailable?
+
+/*
+ * Check to see whether the requested video width and height is one
+ * of the supported sizes.
+ * @param width the video frame width in pixels
+ * @param height the video frame height in pixels
+ * @param supportedSizes the vector of sizes that we check against
+ * @return true if the dimension (width and height) is supported.
+ */
+static bool isVideoSizeSupported(
+    int32_t width, int32_t height,
+    const Vector<Size>& supportedSizes) {
+
+    LOGV("isVideoSizeSupported");
+    for (size_t i = 0; i < supportedSizes.size(); ++i) {
+        if (width  == supportedSizes[i].width &&
+            height == supportedSizes[i].height) {
+            return true;
+        }
+    }
+    return false;
+}
+
+/*
+ * If the preview and video output is separate, we only set the
+ * the video size, and applications should set the preview size
+ * to some proper value, and the recording framework will not
+ * change the preview size; otherwise, if the video and preview
+ * output is the same, we need to set the preview to be the same
+ * as the requested video size.
+ *
+ */
+/*
+ * Query the camera to retrieve the supported video frame sizes
+ * and also to see whether CameraParameters::setVideoSize()
+ * is supported or not.
+ * @param params CameraParameters to retrieve the information
+ * @param isSetVideoSizeSupported returns whether method
+ *      CameraParameters::setVideoSize() is supported or not.
+ * @param sizes returns the vector of Size objects for the
+ *      supported video frame sizes advertised by the camera.
+ */
+static void getSupportedVideoSizes(
+    const CameraParameters& params,
+    bool *isSetVideoSizeSupported,
+    Vector<Size>& sizes) {
+
+    *isSetVideoSizeSupported = true;
+    params.getSupportedVideoSizes(sizes);
+    if (sizes.size() == 0) {
+        LOGD("Camera does not support setVideoSize()");
+        params.getSupportedPreviewSizes(sizes);
+        *isSetVideoSizeSupported = false;
+    }
+}
+
+/*
+ * Check whether the camera has the supported color format
+ * @param params CameraParameters to retrieve the information
+ * @return OK if no error.
+ */
+status_t GonkCameraSource::isCameraColorFormatSupported(
+        const CameraParameters& params) {
+    mColorFormat = getColorFormat(params.get(
+            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+    if (mColorFormat == -1) {
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+/*
+ * Configure the camera to use the requested video size
+ * (width and height) and/or frame rate. If both width and
+ * height are -1, configuration on the video size is skipped.
+ * if frameRate is -1, configuration on the frame rate
+ * is skipped. Skipping the configuration allows one to
+ * use the current camera setting without the need to
+ * actually know the specific values (see Create() method).
+ *
+ * @param params the CameraParameters to be configured
+ * @param width the target video frame width in pixels
+ * @param height the target video frame height in pixels
+ * @param frameRate the target frame rate in frames per second.
+ * @return OK if no error.
+ */
+status_t GonkCameraSource::configureCamera(
+        CameraParameters* params,
+        int32_t width, int32_t height,
+        int32_t frameRate) {
+    LOGV("configureCamera");
+    Vector<Size> sizes;
+    bool isSetVideoSizeSupportedByCamera = true;
+    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
+    bool isCameraParamChanged = false;
+    if (width != -1 && height != -1) {
+        if (!isVideoSizeSupported(width, height, sizes)) {
+            LOGE("Video dimension (%dx%d) is unsupported", width, height);
+            return BAD_VALUE;
+        }
+        if (isSetVideoSizeSupportedByCamera) {
+            params->setVideoSize(width, height);
+        } else {
+            params->setPreviewSize(width, height);
+        }
+        isCameraParamChanged = true;
+    } else if ((width == -1 && height != -1) ||
+               (width != -1 && height == -1)) {
+        // If one and only one of the width and height is -1
+        // we reject such a request.
+        LOGE("Requested video size (%dx%d) is not supported", width, height);
+        return BAD_VALUE;
+    } else {  // width == -1 && height == -1
+        // Do not configure the camera.
+        // Use the current width and height value setting from the camera.
+    }
+
+    if (frameRate != -1) {
+        CHECK(frameRate > 0 && frameRate <= 120);
+        const char* supportedFrameRates =
+                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+        CHECK(supportedFrameRates != NULL);
+        LOGV("Supported frame rates: %s", supportedFrameRates);
+        char buf[4];
+        snprintf(buf, 4, "%d", frameRate);
+        if (strstr(supportedFrameRates, buf) == NULL) {
+            LOGE("Requested frame rate (%d) is not supported: %s",
+                frameRate, supportedFrameRates);
+            return BAD_VALUE;
+        }
+
+        // The frame rate is supported, set the camera to the requested value.
+        params->setPreviewFrameRate(frameRate);
+        isCameraParamChanged = true;
+    } else {  // frameRate == -1
+        // Do not configure the camera.
+        // Use the current frame rate value setting from the camera
+    }
+
+    if (isCameraParamChanged) {
+        // Either frame rate or frame size needs to be changed.
+        if (OK != GonkCameraHardware::PushParameters(mCameraHandle, *params)) {
+            LOGE("Could not change settings."
+                 " Is someone else using the camera?");
+            return -EBUSY;
+        }
+    }
+    return OK;
+}
+
+/*
+ * Check whether the requested video frame size has been successfully
+ * configured or not. If both width and height are -1, the check is
+ * performed against the current width and height settings.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param width the target video frame width in pixels to check against
+ * @param height the target video frame height in pixels to check against
+ * @return OK if no error
+ */
+status_t GonkCameraSource::checkVideoSize(
+        const CameraParameters& params,
+        int32_t width, int32_t height) {
+
+    LOGV("checkVideoSize");
+    // The actual video size is the same as the preview size
+    // if the camera hal does not support separate video and
+    // preview output. In this case, we retrieve the video
+    // size from preview.
+    int32_t frameWidthActual = -1;
+    int32_t frameHeightActual = -1;
+    Vector<Size> sizes;
+    params.getSupportedVideoSizes(sizes);
+    if (sizes.size() == 0) {
+        // video size is the same as preview size
+        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
+    } else {
+        // video size may not be the same as preview
+        params.getVideoSize(&frameWidthActual, &frameHeightActual);
+    }
+    if (frameWidthActual < 0 || frameHeightActual < 0) {
+        LOGE("Failed to retrieve video frame size (%dx%d)",
+                frameWidthActual, frameHeightActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Check the actual video frame size against the target/requested
+    // video frame size.
+    if (width != -1 && height != -1) {
+        if (frameWidthActual != width || frameHeightActual != height) {
+            LOGE("Failed to set video frame size to %dx%d. "
+                    "The actual video size is %dx%d ", width, height,
+                    frameWidthActual, frameHeightActual);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    // Good now.
+    mVideoSize.width = frameWidthActual;
+    mVideoSize.height = frameHeightActual;
+    return OK;
+}
+
+/*
+ * Check whether the requested frame rate has been successfully configured
+ * or not. If the target frameRate is -1, the check is performed against
+ * the current frame rate setting.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param frameRate the target video frame rate to check against
+ * @return OK if no error.
+ */
+status_t GonkCameraSource::checkFrameRate(
+        const CameraParameters& params,
+        int32_t frameRate) {
+
+    LOGV("checkFrameRate");
+    int32_t frameRateActual = params.getPreviewFrameRate();
+    if (frameRateActual < 0) {
+        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Check the actual video frame rate against the target/requested
+    // video frame rate.
+    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
+        LOGE("Failed to set preview frame rate to %d fps. The actual "
+                "frame rate is %d", frameRate, frameRateActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Good now.
+    mVideoFrameRate = frameRateActual;
+    return OK;
+}
+
+/*
+ * Initialize the GonkCameraSource so that it becomes
+ * ready for providing the video input streams as requested.
+ * @param videoSize the target video frame size. If both
+ *      width and height in videoSize are -1, use the current
+ *      width and height settings of the camera
+ * @param frameRate the target frame rate in frames per second.
+ *      If it is -1, use the current camera frame rate setting.
+ * @param storeMetaDataInVideoBuffers request to store meta
+ *      data or real YUV data in video buffers. A request to
+ *      store meta data in video buffers may not be honored
+ *      if the source does not support this feature.
+ *
+ * @return OK if no error.
+ */
+status_t GonkCameraSource::init(
+        Size videoSize,
+        int32_t frameRate,
+        bool storeMetaDataInVideoBuffers) {
+
+    LOGV("init");
+    status_t err = OK;
+    //TODO: need to do something here to check the sanity of camera
+
+    CameraParameters params;
+    GonkCameraHardware::PullParameters(mCameraHandle, params);
+    if ((err = isCameraColorFormatSupported(params)) != OK) {
+        return err;
+    }
+
+    // Set the camera to use the requested video frame size
+    // and/or frame rate.
+    if ((err = configureCamera(&params,
+                    videoSize.width, videoSize.height,
+                    frameRate))) {
+        return err;
+    }
+
+    // Check on video frame size and frame rate.
+    CameraParameters newCameraParams;
+    GonkCameraHardware::PullParameters(mCameraHandle, newCameraParams);
+    if ((err = checkVideoSize(newCameraParams,
+                videoSize.width, videoSize.height)) != OK) {
+        return err;
+    }
+    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
+        return err;
+    }
+
+    // By default, do not store metadata in video buffers
+    mIsMetaDataStoredInVideoBuffers = false;
+    GonkCameraHardware::StoreMetaDataInBuffers(mCameraHandle, false);
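+    // If requested, try to switch to metadata mode, in which video buffers
+    // carry metadata describing the frames rather than the raw YUV data;
+    // fall back silently if the camera HAL rejects the request.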
+    if (storeMetaDataInVideoBuffers) {
+        if (OK == GonkCameraHardware::StoreMetaDataInBuffers(mCameraHandle, true)) {
+            mIsMetaDataStoredInVideoBuffers = true;
+        }
+    }
+
+    const char *hfr_str = params.get("video-hfr");
+    int32_t hfr = -1;
+    if (hfr_str != NULL) {
+        hfr = atoi(hfr_str);
+    }
+    if (hfr < 0) {
+        LOGW("Invalid hfr value (%d) set from app. Disabling HFR.", hfr);
+        hfr = 0;
+    }
+
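+    // Frames arriving more than mGlitchDurationThresholdUs apart are counted
+    // as glitches; make the threshold at least one nominal frame interval.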
+    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
+    if (glitchDurationUs > mGlitchDurationThresholdUs) {
+        mGlitchDurationThresholdUs = glitchDurationUs;
+    }
+
+    const char * k3dFrameArrangement = "3d-frame-format";
+    const char * arrangement = params.get(k3dFrameArrangement);
+    // XXX: just assume left/right for now since that's all the camera supports
+    bool want3D = (arrangement != NULL && !strcmp("left-right", arrangement));
+
+    // XXX: query camera for the stride and slice height
+    // when the capability becomes available.
+    mMeta = new MetaData;
+    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
+    mMeta->setInt32(kKeyColorFormat, mColorFormat);
+    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
+    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
+    mMeta->setInt32(kKeyStride,      mVideoSize.width);
+    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
+
+    return OK;
+}
+
+GonkCameraSource::~GonkCameraSource() {
+    if (mStarted) {
+        stop();
+    } else if (mInitCheck == OK) {
+        // The camera is initialized, but because start() was never called,
+        // the lock on the camera is never released. This makes sure the
+        // camera's lock is released in this case.
+        // TODO: Don't think I need to do this
+        releaseCamera();
+    }
+}
+
+void GonkCameraSource::startCameraRecording() {
+    LOGV("startCameraRecording");
+    CHECK_EQ(OK, GonkCameraHardware::StartRecording(mCameraHandle));
+}
+
+status_t GonkCameraSource::start(MetaData *meta) {
+    LOGV("start");
+    CHECK(!mStarted);
+    if (mInitCheck != OK) {
+        LOGE("GonkCameraSource is not initialized yet");
+        return mInitCheck;
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("media.stagefright.record-stats", value, NULL)
+        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
+        mCollectStats = true;
+    }
+
+    mStartTimeUs = 0;
+    int64_t startTimeUs;
+    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
+        LOGV("Metadata enabled, startime: %lld us", startTimeUs);
+        mStartTimeUs = startTimeUs;
+    }
+
+    // Register a listener with GonkCameraHardware so that we can get callbacks
+    GonkCameraHardware::SetListener(mCameraHandle, new GonkCameraSourceListener(this));
+
+    startCameraRecording();
+
+    mStarted = true;
+    return OK;
+}
+
+void GonkCameraSource::stopCameraRecording() {
+    LOGV("stopCameraRecording");
+    GonkCameraHardware::StopRecording(mCameraHandle);
+}
+
+void GonkCameraSource::releaseCamera() {
+    LOGV("releaseCamera");
+}
+
+status_t GonkCameraSource::stop() {
+    LOGV("stop: E");
+    Mutex::Autolock autoLock(mLock);
+    mStarted = false;
+    mFrameAvailableCondition.signal();
+
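+    // Drop any frames not yet handed to the encoder, then wait for the
+    // encoder to return the frames it is still holding.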
+    releaseQueuedFrames();
+    while (!mFramesBeingEncoded.empty()) {
+        if (NO_ERROR !=
+            mFrameCompleteCondition.waitRelative(mLock,
+                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
+            LOGW("Timed out waiting for outstanding frames being encoded: %d",
+                mFramesBeingEncoded.size());
+        }
+    }
+    LOGV("Calling stopCameraRecording");
+    stopCameraRecording();
+    releaseCamera();
+
+    if (mCollectStats) {
+        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
+                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
+                mLastFrameTimestampUs - mFirstFrameTimeUs);
+    }
+
+    if (mNumGlitches > 0) {
+        LOGW("%d long delays between neighboring video frames", mNumGlitches);
+    }
+
+    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
+    LOGV("stop: X");
+    return OK;
+}
+
+void GonkCameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+    LOGV("releaseRecordingFrame");
+    GonkCameraHardware::ReleaseRecordingFrame(mCameraHandle, frame);
+}
+
+void GonkCameraSource::releaseQueuedFrames() {
+    List<sp<IMemory> >::iterator it;
+    while (!mFramesReceived.empty()) {
+        it = mFramesReceived.begin();
+        releaseRecordingFrame(*it);
+        mFramesReceived.erase(it);
+        ++mNumFramesDropped;
+    }
+}
+
+sp<MetaData> GonkCameraSource::getFormat() {
+    return mMeta;
+}
+
+void GonkCameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
+    releaseRecordingFrame(frame);
+}
+
+void GonkCameraSource::signalBufferReturned(MediaBuffer *buffer) {
+    LOGV("signalBufferReturned: %p", buffer->data());
+    Mutex::Autolock autoLock(mLock);
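+    // The encoder has finished with this buffer; find the camera frame
+    // backing it, hand the frame back to the HAL and release the buffer.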
+    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
+         it != mFramesBeingEncoded.end(); ++it) {
+        if ((*it)->pointer() ==  buffer->data()) {
+            releaseOneRecordingFrame((*it));
+            mFramesBeingEncoded.erase(it);
+            ++mNumFramesEncoded;
+            buffer->setObserver(0);
+            buffer->release();
+            mFrameCompleteCondition.signal();
+            return;
+        }
+    }
+    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
+}
+
+status_t GonkCameraSource::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    LOGV("read");
+
+    *buffer = NULL;
+
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode mode;
+    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    sp<IMemory> frame;
+    int64_t frameTime;
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        while (mStarted && mFramesReceived.empty()) {
+            if (NO_ERROR !=
+                mFrameAvailableCondition.waitRelative(mLock,
+                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
+                //TODO: check sanity of camera?
+                LOGW("Timed out waiting for incoming camera video frames: %lld us",
+                    mLastFrameTimestampUs);
+            }
+        }
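+        // stop() may have cleared mStarted while we were waiting; in that
+        // case return OK with *buffer still NULL.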
+        if (!mStarted) {
+            return OK;
+        }
+        frame = *mFramesReceived.begin();
+        mFramesReceived.erase(mFramesReceived.begin());
+
+        frameTime = *mFrameTimes.begin();
+        mFrameTimes.erase(mFrameTimes.begin());
+        mFramesBeingEncoded.push_back(frame);
+        *buffer = new MediaBuffer(frame->pointer(), frame->size());
+        (*buffer)->setObserver(this);
+        (*buffer)->add_ref();
+        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
+    }
+    return OK;
+}
+
+void GonkCameraSource::dataCallbackTimestamp(int64_t timestampUs,
+        int32_t msgType, const sp<IMemory> &data) {
+    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
+    //LOGV("dataCallbackTimestamp: data %x size %d", data->pointer(), data->size());
+    Mutex::Autolock autoLock(mLock);
+    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
+        LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
+        releaseOneRecordingFrame(data);
+        return;
+    }
+
+    if (mNumFramesReceived > 0) {
+        CHECK(timestampUs > mLastFrameTimestampUs);
+        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
+            ++mNumGlitches;
+        }
+    }
+
+    // May need to skip frame or modify timestamp. Currently implemented
+    // by the subclass GonkCameraSourceTimeLapse.
+    if (skipCurrentFrame(timestampUs)) {
+        releaseOneRecordingFrame(data);
+        return;
+    }
+
+    mLastFrameTimestampUs = timestampUs;
+    if (mNumFramesReceived == 0) {
+        mFirstFrameTimeUs = timestampUs;
+        // Initial delay
+        if (mStartTimeUs > 0) {
+            if (timestampUs < mStartTimeUs) {
+                // Frame was captured before recording was started
+                // Drop it without updating the statistical data.
+                releaseOneRecordingFrame(data);
+                return;
+            }
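+            // From here on, mStartTimeUs holds the initial delay between
+            // the requested start time and the first captured frame; it is
+            // added back into each frame's timestamp below.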
+            mStartTimeUs = timestampUs - mStartTimeUs;
+        }
+    }
+    ++mNumFramesReceived;
+
+    CHECK(data != NULL && data->size() > 0);
+    mFramesReceived.push_back(data);
+    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
+    mFrameTimes.push_back(timeUs);
+    LOGV("initial delay: %lld, current time stamp: %lld",
+        mStartTimeUs, timeUs);
+    mFrameAvailableCondition.signal();
+}
+
+bool GonkCameraSource::isMetaDataStoredInVideoBuffers() const {
+    LOGV("isMetaDataStoredInVideoBuffers");
+    return mIsMetaDataStoredInVideoBuffers;
+}
+
+} // namespace android
new file mode 100644
--- /dev/null
+++ b/dom/camera/GonkCameraSource.h
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GONK_CAMERA_SOURCE_H_
+
+#define GONK_CAMERA_SOURCE_H_
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaSource.h>
+#include <camera/CameraParameters.h>
+#include <utils/List.h>
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class IMemory;
+class GonkCameraSourceListener;
+
+class GonkCameraSource : public MediaSource, public MediaBufferObserver {
+public:
+
+    static GonkCameraSource *Create(int32_t cameraHandle,
+                                    Size videoSize,
+                                    int32_t frameRate,
+                                    bool storeMetaDataInVideoBuffers = false);
+
+    virtual ~GonkCameraSource();
+
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+    /**
+     * Check whether a GonkCameraSource object is properly initialized.
+     * Must call this method before stop().
+     * @return OK if initialization has successfully completed.
+     */
+    virtual status_t initCheck() const;
+
+    /**
+     * Returns the MetaData associated with the GonkCameraSource,
+     * including:
+     * kKeyColorFormat: YUV color format of the video frames
+     * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames
+     * kKeyFrameRate: frame rate in frames per second
+     * kKeyMIMEType: always fixed to be MEDIA_MIMETYPE_VIDEO_RAW
+     */
+    virtual sp<MetaData> getFormat();
+
+    /**
+     * Tell whether this camera source stores meta data or real YUV
+     * frame data in video buffers.
+     *
+     * @return true if meta data is stored in the video
+     *      buffers; false if real YUV data is stored in
+     *      the video buffers.
+     */
+    bool isMetaDataStoredInVideoBuffers() const;
+
+    virtual void signalBufferReturned(MediaBuffer* buffer);
+
+protected:
+
+    enum CameraFlags {
+        FLAGS_SET_CAMERA = 1L << 0,
+        FLAGS_HOT_CAMERA = 1L << 1,
+    };
+
+    int32_t  mCameraFlags;
+    Size     mVideoSize;
+    int32_t  mVideoFrameRate;
+    int32_t  mColorFormat;
+    status_t mInitCheck;
+
+    sp<MetaData> mMeta;
+
+    int64_t mStartTimeUs;
+    int32_t mNumFramesReceived;
+    int64_t mLastFrameTimestampUs;
+    bool mStarted;
+    int32_t mNumFramesEncoded;
+
+    // Time between capture of two frames.
+    int64_t mTimeBetweenFrameCaptureUs;
+
+    GonkCameraSource(int32_t cameraHandle,
+                 Size videoSize, int32_t frameRate,
+                 bool storeMetaDataInVideoBuffers = false);
+
+    virtual void startCameraRecording();
+    virtual void stopCameraRecording();
+    virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+    // Returns true if need to skip the current frame.
+    // Called from dataCallbackTimestamp.
+    virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}
+
+    friend class GonkCameraSourceListener;
+    // Callback called when still camera raw data is available.
+    virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}
+
+    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+            const sp<IMemory> &data);
+
+private:
+
+    Mutex mLock;
+    Condition mFrameAvailableCondition;
+    Condition mFrameCompleteCondition;
+    List<sp<IMemory> > mFramesReceived;
+    List<sp<IMemory> > mFramesBeingEncoded;
+    List<int64_t> mFrameTimes;
+
+    int64_t mFirstFrameTimeUs;
+    int32_t mNumFramesDropped;
+    int32_t mNumGlitches;
+    int64_t mGlitchDurationThresholdUs;
+    bool mCollectStats;
+    bool mIsMetaDataStoredInVideoBuffers;
+    int32_t mCameraHandle;
+
+    void releaseQueuedFrames();
+    void releaseOneRecordingFrame(const sp<IMemory>& frame);
+
+    status_t init(Size videoSize, int32_t frameRate,
+                  bool storeMetaDataInVideoBuffers);
+    status_t isCameraColorFormatSupported(const CameraParameters& params);
+    status_t configureCamera(CameraParameters* params,
+                    int32_t width, int32_t height,
+                    int32_t frameRate);
+
+    status_t checkVideoSize(const CameraParameters& params,
+                    int32_t width, int32_t height);
+
+    status_t checkFrameRate(const CameraParameters& params,
+                    int32_t frameRate);
+
+    void releaseCamera();
+
+    GonkCameraSource(const GonkCameraSource &);
+    GonkCameraSource &operator=(const GonkCameraSource &);
+};
+
+}  // namespace android
+
+#endif  // GONK_CAMERA_SOURCE_H_
new file mode 100644
--- /dev/null
+++ b/dom/camera/GonkRecorder.cpp
@@ -0,0 +1,1629 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ * Copyright (c) 2011, Code Aurora Forum. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GonkRecorder"
+
+#include <utils/Log.h>
+#include <media/AudioParameter.h>
+#include "GonkRecorder.h"
+
+#include <media/stagefright/AudioSource.h>
+#include <media/stagefright/AMRWriter.h>
+#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <OMX.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/MediaProfiles.h>
+#include <utils/String8.h>
+
+#include <utils/Errors.h>
+#include <sys/types.h>
+#include <ctype.h>
+#include <unistd.h>
+
+#include <system/audio.h>
+
+#include "ARTPWriter.h"
+
+#include <cutils/properties.h>
+#include "GonkCameraSource.h"
+
+namespace android {
+
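+// Lazily create a process-local OMX instance; the recorder uses it directly
+// instead of connecting to mediaserver over binder.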
+static sp<IOMX> sOMX = NULL;
+static sp<IOMX> GetOMX() {
+  if(sOMX.get() == NULL) {
+    sOMX = new OMX;
+    }
+  return sOMX;
+}
+
+GonkRecorder::GonkRecorder()
+    : mWriter(NULL),
+      mOutputFd(-1),
+      mAudioSource(AUDIO_SOURCE_CNT),
+      mVideoSource(VIDEO_SOURCE_LIST_END),
+      mStarted(false),
+      mDisableAudio(false) {
+
+    LOGV("Constructor");
+    reset();
+}
+
+GonkRecorder::~GonkRecorder() {
+    LOGV("Destructor");
+    stop();
+}
+
+status_t GonkRecorder::init() {
+    LOGV("init");
+    return OK;
+}
+
+status_t GonkRecorder::setAudioSource(audio_source_t as) {
+    LOGV("setAudioSource: %d", as);
+    if (as < AUDIO_SOURCE_DEFAULT ||
+        as >= AUDIO_SOURCE_CNT) {
+        LOGE("Invalid audio source: %d", as);
+        return BAD_VALUE;
+    }
+
+    if (mDisableAudio) {
+        return OK;
+    }
+
+    if (as == AUDIO_SOURCE_DEFAULT) {
+        mAudioSource = AUDIO_SOURCE_MIC;
+    } else {
+        mAudioSource = as;
+    }
+
+    return OK;
+}
+
+status_t GonkRecorder::setVideoSource(video_source vs) {
+    LOGV("setVideoSource: %d", vs);
+    if (vs < VIDEO_SOURCE_DEFAULT ||
+        vs >= VIDEO_SOURCE_LIST_END) {
+        LOGE("Invalid video source: %d", vs);
+        return BAD_VALUE;
+    }
+
+    if (vs == VIDEO_SOURCE_DEFAULT) {
+        mVideoSource = VIDEO_SOURCE_CAMERA;
+    } else {
+        mVideoSource = vs;
+    }
+
+    return OK;
+}
+
+status_t GonkRecorder::setOutputFormat(output_format of) {
+    LOGV("setOutputFormat: %d", of);
+    if (of < OUTPUT_FORMAT_DEFAULT ||
+        of >= OUTPUT_FORMAT_LIST_END) {
+        LOGE("Invalid output format: %d", of);
+        return BAD_VALUE;
+    }
+
+    if (of == OUTPUT_FORMAT_DEFAULT) {
+        mOutputFormat = OUTPUT_FORMAT_THREE_GPP;
+    } else {
+        mOutputFormat = of;
+    }
+
+    return OK;
+}
+
+status_t GonkRecorder::setAudioEncoder(audio_encoder ae) {
+    LOGV("setAudioEncoder: %d", ae);
+    if (ae < AUDIO_ENCODER_DEFAULT ||
+        ae >= AUDIO_ENCODER_LIST_END) {
+        LOGE("Invalid audio encoder: %d", ae);
+        return BAD_VALUE;
+    }
+
+    if (mDisableAudio) {
+        return OK;
+    }
+
+    if (ae == AUDIO_ENCODER_DEFAULT) {
+        mAudioEncoder = AUDIO_ENCODER_AMR_NB;
+    } else {
+        mAudioEncoder = ae;
+    }
+
+    return OK;
+}
+
+status_t GonkRecorder::setVideoEncoder(video_encoder ve) {
+    LOGV("setVideoEncoder: %d", ve);
+    if (ve < VIDEO_ENCODER_DEFAULT ||
+        ve >= VIDEO_ENCODER_LIST_END) {
+        LOGE("Invalid video encoder: %d", ve);
+        return BAD_VALUE;
+    }
+
+    if (ve == VIDEO_ENCODER_DEFAULT) {
+        mVideoEncoder = VIDEO_ENCODER_H263;
+    } else {
+        mVideoEncoder = ve;
+    }
+
+    return OK;
+}
+
+status_t GonkRecorder::setVideoSize(int width, int height) {
+    LOGV("setVideoSize: %dx%d", width, height);
+    if (width <= 0 || height <= 0) {
+        LOGE("Invalid video size: %dx%d", width, height);
+        return BAD_VALUE;
+    }
+
+    // Additional check on the dimension will be performed later
+    mVideoWidth = width;
+    mVideoHeight = height;
+
+    return OK;
+}
+
+status_t GonkRecorder::setVideoFrameRate(int frames_per_second) {
+    LOGV("setVideoFrameRate: %d", frames_per_second);
+    if ((frames_per_second <= 0 && frames_per_second != -1) ||
+        frames_per_second > 120) {
+        LOGE("Invalid video frame rate: %d", frames_per_second);
+        return BAD_VALUE;
+    }
+
+    // Additional check on the frame rate will be performed later
+    mFrameRate = frames_per_second;
+
+    return OK;
+}
+
+status_t GonkRecorder::setOutputFile(const char *path) {
+    LOGE("setOutputFile(const char*) must not be called");
+    // We don't actually support this at all, as the media_server process
+    // no longer has permissions to create files.
+
+    return -EPERM;
+}
+
+status_t GonkRecorder::setOutputFile(int fd, int64_t offset, int64_t length) {
+    LOGV("setOutputFile: %d, %lld, %lld", fd, offset, length);
+    // These don't make any sense, do they?
+    CHECK_EQ(offset, 0);
+    CHECK_EQ(length, 0);
+
+    if (fd < 0) {
+        LOGE("Invalid file descriptor: %d", fd);
+        return -EBADF;
+    }
+
+    if (mOutputFd >= 0) {
+        ::close(mOutputFd);
+    }
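+    // Duplicate the descriptor so the recorder owns an independent
+    // reference; the caller remains free to close its own fd.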
+    mOutputFd = dup(fd);
+
+    return OK;
+}
+
+// Attempt to parse an int64 literal optionally surrounded by whitespace,
+// returns true on success, false otherwise.
+static bool safe_strtoi64(const char *s, int64_t *val) {
+    char *end;
+
+    // It is lame, but according to man page, we have to set errno to 0
+    // before calling strtoll().
+    errno = 0;
+    *val = strtoll(s, &end, 10);
+
+    if (end == s || errno == ERANGE) {
+        return false;
+    }
+
+    // Skip trailing whitespace
+    while (isspace(*end)) {
+        ++end;
+    }
+
+    // For a successful return, the string must contain nothing but a valid
+    // int64 literal optionally surrounded by whitespace.
+
+    return *end == '\0';
+}
+
+// Return true if the value is in [0, 0x007FFFFFFF]
+static bool safe_strtoi32(const char *s, int32_t *val) {
+    int64_t temp;
+    if (safe_strtoi64(s, &temp)) {
+        if (temp >= 0 && temp <= 0x007FFFFFFF) {
+            *val = static_cast<int32_t>(temp);
+            return true;
+        }
+    }
+    return false;
+}
+
+// Trim both leading and trailing whitespace from the given string.
+static void TrimString(String8 *s) {
+    size_t num_bytes = s->bytes();
+    const char *data = s->string();
+
+    size_t leading_space = 0;
+    while (leading_space < num_bytes && isspace(data[leading_space])) {
+        ++leading_space;
+    }
+
+    size_t i = num_bytes;
+    while (i > leading_space && isspace(data[i - 1])) {
+        --i;
+    }
+
+    s->setTo(String8(&data[leading_space], i - leading_space));
+}
+
+status_t GonkRecorder::setParamAudioSamplingRate(int32_t sampleRate) {
+    LOGV("setParamAudioSamplingRate: %d", sampleRate);
+    if (sampleRate <= 0) {
+        LOGE("Invalid audio sampling rate: %d", sampleRate);
+        return BAD_VALUE;
+    }
+
+    // Additional check on the sample rate will be performed later.
+    mSampleRate = sampleRate;
+    return OK;
+}
+
+status_t GonkRecorder::setParamAudioNumberOfChannels(int32_t channels) {
+    LOGV("setParamAudioNumberOfChannels: %d", channels);
+    if (channels <= 0 || channels >= 3) {
+        LOGE("Invalid number of audio channels: %d", channels);
+        return BAD_VALUE;
+    }
+
+    // Additional check on the number of channels will be performed later.
+    mAudioChannels = channels;
+    return OK;
+}
+
+status_t GonkRecorder::setParamAudioEncodingBitRate(int32_t bitRate) {
+    LOGV("setParamAudioEncodingBitRate: %d", bitRate);
+    if (bitRate <= 0) {
+        LOGE("Invalid audio encoding bit rate: %d", bitRate);
+        return BAD_VALUE;
+    }
+
+    // The target bit rate may not be exactly the same as the requested.
+    // It depends on many factors, such as rate control, and the bit rate
+    // range that a specific encoder supports. The mismatch between the
+    // target and requested bit rate will NOT be treated as an error.
+    mAudioBitRate = bitRate;
+    return OK;
+}
+
+status_t GonkRecorder::setParamVideoEncodingBitRate(int32_t bitRate) {
+    LOGV("setParamVideoEncodingBitRate: %d", bitRate);
+    if (bitRate <= 0) {
+        LOGE("Invalid video encoding bit rate: %d", bitRate);
+        return BAD_VALUE;
+    }
+
+    // The target bit rate may not be exactly the same as the requested.
+    // It depends on many factors, such as rate control, and the bit rate
+    // range that a specific encoder supports. The mismatch between the
+    // target and requested bit rate will NOT be treated as an error.
+    mVideoBitRate = bitRate;
+    return OK;
+}
+
+// Always rotate clockwise, and only support 0, 90, 180 and 270 for now.
+status_t GonkRecorder::setParamVideoRotation(int32_t degrees) {
+    LOGV("setParamVideoRotation: %d", degrees);
+    if (degrees < 0 || degrees % 90 != 0) {
+        LOGE("Unsupported video rotation angle: %d", degrees);
+        return BAD_VALUE;
+    }
+    mRotationDegrees = degrees % 360;
+    return OK;
+}
+
+status_t GonkRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
+    LOGV("setParamMaxFileDurationUs: %lld us", timeUs);
+
+    // This is meant for backward compatibility for MediaRecorder.java
+    if (timeUs <= 0) {
+        LOGW("Max file duration is not positive: %lld us. Disabling duration limit.", timeUs);
+        timeUs = 0; // Disable the duration limit for zero or negative values.
+    } else if (timeUs <= 100000LL) {  // XXX: 100 milli-seconds
+        LOGE("Max file duration is too short: %lld us", timeUs);
+        return BAD_VALUE;
+    }
+
+    if (timeUs <= 15 * 1000000LL) {
+        LOGW("Target duration (%lld us) too short to be respected", timeUs);
+    }
+    mMaxFileDurationUs = timeUs;
+    return OK;
+}
+
+status_t GonkRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
+    LOGV("setParamMaxFileSizeBytes: %lld bytes", bytes);
+
+    // This is meant for backward compatibility for MediaRecorder.java
+    if (bytes <= 0) {
+        LOGW("Max file size is not positive: %lld bytes. "
+             "Disabling file size limit.", bytes);
+        bytes = 0; // Disable the file size limit for zero or negative values.
+    } else if (bytes <= 1024) {  // XXX: 1 kB
+        LOGE("Max file size is too small: %lld bytes", bytes);
+        return BAD_VALUE;
+    }
+
+    if (bytes <= 100 * 1024) {
+        LOGW("Target file size (%lld bytes) is too small to be respected", bytes);
+    }
+
+    if (bytes >= 0xffffffffLL) {
+        LOGW("Target file size (%lld bytes) too larger than supported, clip to 4GB", bytes);
+        bytes = 0xffffffffLL;
+    }
+
+    mMaxFileSizeBytes = bytes;
+    return OK;
+}
+
+status_t GonkRecorder::setParamInterleaveDuration(int32_t durationUs) {
+    LOGV("setParamInterleaveDuration: %d", durationUs);
+    if (durationUs <= 500000) {           //  500 ms
+        // If interleave duration is too small, it is very inefficient to do
+        // interleaving since the metadata overhead will account for a significant
+        // portion of the saved contents
+        LOGE("Audio/video interleave duration is too small: %d us", durationUs);
+        return BAD_VALUE;
+    } else if (durationUs >= 10000000) {  // 10 seconds
+        // If interleaving duration is too large, it can cause the recording
+        // session to use too much memory since we have to save the output
+        // data before we write them out
+        LOGE("Audio/video interleave duration is too large: %d us", durationUs);
+        return BAD_VALUE;
+    }
+    mInterleaveDurationUs = durationUs;
+    return OK;
+}
+
+// If seconds <  0, only the first frame is I frame, and rest are all P frames
+// If seconds == 0, all frames are encoded as I frames. No P frames
+// If seconds >  0, it is the time spacing (seconds) between 2 neighboring I frames
+status_t GonkRecorder::setParamVideoIFramesInterval(int32_t seconds) {
+    LOGV("setParamVideoIFramesInterval: %d seconds", seconds);
+    mIFramesIntervalSec = seconds;
+    return OK;
+}
+
+status_t GonkRecorder::setParam64BitFileOffset(bool use64Bit) {
+    LOGV("setParam64BitFileOffset: %s",
+        use64Bit? "use 64 bit file offset": "use 32 bit file offset");
+    mUse64BitFileOffset = use64Bit;
+    return OK;
+}
+
+status_t GonkRecorder::setParamVideoCameraId(int32_t cameraId) {
+    LOGV("setParamVideoCameraId: %d", cameraId);
+    if (cameraId < 0) {
+        return BAD_VALUE;
+    }
+    mCameraId = cameraId;
+    return OK;
+}
+
+status_t GonkRecorder::setParamTrackTimeStatus(int64_t timeDurationUs) {
+    LOGV("setParamTrackTimeStatus: %lld", timeDurationUs);
+    if (timeDurationUs < 20000) {  // Infeasible if shorter than 20 ms?
+        LOGE("Tracking time duration too short: %lld us", timeDurationUs);
+        return BAD_VALUE;
+    }
+    mTrackEveryTimeDurationUs = timeDurationUs;
+    return OK;
+}
+
+status_t GonkRecorder::setParamVideoEncoderProfile(int32_t profile) {
+    LOGV("setParamVideoEncoderProfile: %d", profile);
+
+    // Additional check will be done later when we load the encoder.
+    // For now, we are accepting values defined in OpenMAX IL.
+    mVideoEncoderProfile = profile;
+    return OK;
+}
+
+status_t GonkRecorder::setParamVideoEncoderLevel(int32_t level) {
+    LOGV("setParamVideoEncoderLevel: %d", level);
+
+    // Additional check will be done later when we load the encoder.
+    // For now, we are accepting values defined in OpenMAX IL.
+    mVideoEncoderLevel = level;
+    return OK;
+}
+
+status_t GonkRecorder::setParamMovieTimeScale(int32_t timeScale) {
+    LOGV("setParamMovieTimeScale: %d", timeScale);
+
+    // The range is set to be the same as the audio's time scale range
+    // since audio's time scale has a wider range.
+    if (timeScale < 600 || timeScale > 96000) {
+        LOGE("Time scale (%d) for movie is out of range [600, 96000]", timeScale);
+        return BAD_VALUE;
+    }
+    mMovieTimeScale = timeScale;
+    return OK;
+}
+
+status_t GonkRecorder::setParamVideoTimeScale(int32_t timeScale) {
+    LOGV("setParamVideoTimeScale: %d", timeScale);
+
+    // 60000 is chosen to make sure that each video frame from a 60-fps
+    // video has 1000 ticks.
+    if (timeScale < 600 || timeScale > 60000) {
+        LOGE("Time scale (%d) for video is out of range [600, 60000]", timeScale);
+        return BAD_VALUE;
+    }
+    mVideoTimeScale = timeScale;
+    return OK;
+}
+
+status_t GonkRecorder::setParamAudioTimeScale(int32_t timeScale) {
+    LOGV("setParamAudioTimeScale: %d", timeScale);
+
+    // 96000 Hz is the highest sampling rate supported in AAC.
+    if (timeScale < 600 || timeScale > 96000) {
+        LOGE("Time scale (%d) for audio is out of range [600, 96000]", timeScale);
+        return BAD_VALUE;
+    }
+    mAudioTimeScale = timeScale;
+    return OK;
+}
+
+status_t GonkRecorder::setParamGeoDataLongitude(
+    int64_t longitudex10000) {
+
+    if (longitudex10000 > 1800000 || longitudex10000 < -1800000) {
+        return BAD_VALUE;
+    }
+    mLongitudex10000 = longitudex10000;
+    return OK;
+}
+
+status_t GonkRecorder::setParamGeoDataLatitude(
+    int64_t latitudex10000) {
+
+    if (latitudex10000 > 900000 || latitudex10000 < -900000) {
+        return BAD_VALUE;
+    }
+    mLatitudex10000 = latitudex10000;
+    return OK;
+}
+
+status_t GonkRecorder::setParameter(
+        const String8 &key, const String8 &value) {
+    LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
+    if (key == "max-duration") {
+        int64_t max_duration_ms;
+        if (safe_strtoi64(value.string(), &max_duration_ms)) {
+            return setParamMaxFileDurationUs(1000LL * max_duration_ms);
+        }
+    } else if (key == "max-filesize") {
+        int64_t max_filesize_bytes;
+        if (safe_strtoi64(value.string(), &max_filesize_bytes)) {
+            return setParamMaxFileSizeBytes(max_filesize_bytes);
+        }
+    } else if (key == "interleave-duration-us") {
+        int32_t durationUs;
+        if (safe_strtoi32(value.string(), &durationUs)) {
+            return setParamInterleaveDuration(durationUs);
+        }
+    } else if (key == "param-movie-time-scale") {
+        int32_t timeScale;
+        if (safe_strtoi32(value.string(), &timeScale)) {
+            return setParamMovieTimeScale(timeScale);
+        }
+    } else if (key == "param-use-64bit-offset") {
+        int32_t use64BitOffset;
+        if (safe_strtoi32(value.string(), &use64BitOffset)) {
+            return setParam64BitFileOffset(use64BitOffset != 0);
+        }
+    } else if (key == "param-geotag-longitude") {
+        int64_t longitudex10000;
+        if (safe_strtoi64(value.string(), &longitudex10000)) {
+            return setParamGeoDataLongitude(longitudex10000);
+        }
+    } else if (key == "param-geotag-latitude") {
+        int64_t latitudex10000;
+        if (safe_strtoi64(value.string(), &latitudex10000)) {
+            return setParamGeoDataLatitude(latitudex10000);
+        }
+    } else if (key == "param-track-time-status") {
+        int64_t timeDurationUs;
+        if (safe_strtoi64(value.string(), &timeDurationUs)) {
+            return setParamTrackTimeStatus(timeDurationUs);
+        }
+    } else if (key == "audio-param-sampling-rate") {
+        int32_t sampling_rate;
+        if (safe_strtoi32(value.string(), &sampling_rate)) {
+            return setParamAudioSamplingRate(sampling_rate);
+        }
+    } else if (key == "audio-param-number-of-channels") {
+        int32_t number_of_channels;
+        if (safe_strtoi32(value.string(), &number_of_channels)) {
+            return setParamAudioNumberOfChannels(number_of_channels);
+        }
+    } else if (key == "audio-param-encoding-bitrate") {
+        int32_t audio_bitrate;
+        if (safe_strtoi32(value.string(), &audio_bitrate)) {
+            return setParamAudioEncodingBitRate(audio_bitrate);
+        }
+    } else if (key == "audio-param-time-scale") {
+        int32_t timeScale;
+        if (safe_strtoi32(value.string(), &timeScale)) {
+            return setParamAudioTimeScale(timeScale);
+        }
+    } else if (key == "video-param-encoding-bitrate") {
+        int32_t video_bitrate;
+        if (safe_strtoi32(value.string(), &video_bitrate)) {
+            return setParamVideoEncodingBitRate(video_bitrate);
+        }
+    } else if (key == "video-param-rotation-angle-degrees") {
+        int32_t degrees;
+        if (safe_strtoi32(value.string(), &degrees)) {
+            return setParamVideoRotation(degrees);
+        }
+    } else if (key == "video-param-i-frames-interval") {
+        int32_t seconds;
+        if (safe_strtoi32(value.string(), &seconds)) {
+            return setParamVideoIFramesInterval(seconds);
+        }
+    } else if (key == "video-param-encoder-profile") {
+        int32_t profile;
+        if (safe_strtoi32(value.string(), &profile)) {
+            return setParamVideoEncoderProfile(profile);
+        }
+    } else if (key == "video-param-encoder-level") {
+        int32_t level;
+        if (safe_strtoi32(value.string(), &level)) {
+            return setParamVideoEncoderLevel(level);
+        }
+    } else if (key == "video-param-camera-id") {
+        int32_t cameraId;
+        if (safe_strtoi32(value.string(), &cameraId)) {
+            return setParamVideoCameraId(cameraId);
+        }
+    } else if (key == "video-param-time-scale") {
+        int32_t timeScale;
+        if (safe_strtoi32(value.string(), &timeScale)) {
+            return setParamVideoTimeScale(timeScale);
+        }
+    } else {
+        LOGE("setParameter: failed to find key %s", key.string());
+    }
+    return BAD_VALUE;
+}
+
+status_t GonkRecorder::setParameters(const String8 &params) {
+    LOGV("setParameters: %s", params.string());
+    const char *cparams = params.string();
+    const char *key_start = cparams;
+    for (;;) {
+        const char *equal_pos = strchr(key_start, '=');
+        if (equal_pos == NULL) {
+            LOGE("Parameters %s miss a value", cparams);
+            return BAD_VALUE;
+        }
+        String8 key(key_start, equal_pos - key_start);
+        TrimString(&key);
+        if (key.length() == 0) {
+            LOGE("Parameters %s contains an empty key", cparams);
+            return BAD_VALUE;
+        }
+        const char *value_start = equal_pos + 1;
+        const char *semicolon_pos = strchr(value_start, ';');
+        String8 value;
+        if (semicolon_pos == NULL) {
+            value.setTo(value_start);
+        } else {
+            value.setTo(value_start, semicolon_pos - value_start);
+        }
+        if (setParameter(key, value) != OK) {
+            return BAD_VALUE;
+        }
+        if (semicolon_pos == NULL) {
+            break;  // Reaches the end
+        }
+        key_start = semicolon_pos + 1;
+    }
+    return OK;
+}
+
+status_t GonkRecorder::setListener(const sp<IMediaRecorderClient> &listener) {
+    mListener = listener;
+
+    return OK;
+}
+
+status_t GonkRecorder::prepare() {
+    LOGV(" %s E", __func__);
+
+    if (mVideoSource != VIDEO_SOURCE_LIST_END &&
+        mVideoEncoder != VIDEO_ENCODER_LIST_END &&
+        mVideoHeight && mVideoWidth &&                /* Video recording */
+        (mMaxFileDurationUs <= 0 ||                   /* Max duration is not set */
+         (mVideoHeight * mVideoWidth < 720 * 1280 &&
+          mMaxFileDurationUs > 30 * 60 * 1000 * 1000) ||
+         (mVideoHeight * mVideoWidth >= 720 * 1280 &&
+          mMaxFileDurationUs > 10 * 60 * 1000 * 1000))) {
+        /* The above check can be further optimized for lower resolutions
+           to reduce file size. */
+        LOGV("File is huge so setting 64 bit file offsets");
+        setParam64BitFileOffset(true);
+    }
+    LOGV(" %s X", __func__);
+    return OK;
+}
+
+status_t GonkRecorder::start() {
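+    // A valid output descriptor must already have been supplied via
+    // setOutputFile(int, ...) before start() is called.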
+    CHECK(mOutputFd >= 0);
+
+    if (mWriter != NULL) {
+        LOGE("File writer is not available");
+        return UNKNOWN_ERROR;
+    }
+
+    status_t status = OK;
+
+    switch (mOutputFormat) {
+        case OUTPUT_FORMAT_DEFAULT:
+        case OUTPUT_FORMAT_THREE_GPP:
+        case OUTPUT_FORMAT_MPEG_4:
+            status = startMPEG4Recording();
+            break;
+
+        case OUTPUT_FORMAT_AMR_NB:
+        case OUTPUT_FORMAT_AMR_WB:
+            status = startAMRRecording();
+            break;
+
+        case OUTPUT_FORMAT_MPEG2TS:
+            status = startMPEG2TSRecording();
+            break;
+        default:
+            LOGE("Unsupported output file format: %d", mOutputFormat);
+            status = UNKNOWN_ERROR;
+            break;
+    }
+
+    if ((status == OK) && (!mStarted)) {
+        mStarted = true;
+    }
+
+    return status;
+}
+
+sp<MediaSource> GonkRecorder::createAudioSource() {
+
+    sp<AudioSource> audioSource =
+        new AudioSource(
+                mAudioSource,
+                mSampleRate,
+                mAudioChannels);
+
+    status_t err = audioSource->initCheck();
+
+    if (err != OK) {
+        LOGE("audio source is not initialized");
+        return NULL;
+    }
+
+    sp<MetaData> encMeta = new MetaData;
+    const char *mime;
+    switch (mAudioEncoder) {
+        case AUDIO_ENCODER_AMR_NB:
+        case AUDIO_ENCODER_DEFAULT:
+            mime = MEDIA_MIMETYPE_AUDIO_AMR_NB;
+            break;
+        case AUDIO_ENCODER_AMR_WB:
+            mime = MEDIA_MIMETYPE_AUDIO_AMR_WB;
+            break;
+        case AUDIO_ENCODER_AAC:
+            mime = MEDIA_MIMETYPE_AUDIO_AAC;
+            break;
+        default:
+            LOGE("Unknown audio encoder: %d", mAudioEncoder);
+            return NULL;
+    }
+    encMeta->setCString(kKeyMIMEType, mime);
+
+    int32_t maxInputSize;
+    CHECK(audioSource->getFormat()->findInt32(
+                kKeyMaxInputSize, &maxInputSize));
+
+    encMeta->setInt32(kKeyMaxInputSize, maxInputSize);
+    encMeta->setInt32(kKeyChannelCount, mAudioChannels);
+    encMeta->setInt32(kKeySampleRate, mSampleRate);
+    encMeta->setInt32(kKeyBitRate, mAudioBitRate);
+    if (mAudioTimeScale > 0) {
+        encMeta->setInt32(kKeyTimeScale, mAudioTimeScale);
+    }
+
+    // use direct OMX interface instead of connecting to
+    // mediaserver over binder calls
+    sp<MediaSource> audioEncoder =
+        OMXCodec::Create(GetOMX(), encMeta,
+                         true /* createEncoder */, audioSource);
+    mAudioSourceNode = audioSource;
+
+    return audioEncoder;
+}
+
+status_t GonkRecorder::startAMRRecording() {
+    CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
+          mOutputFormat == OUTPUT_FORMAT_AMR_WB);
+
+    if (mOutputFormat == OUTPUT_FORMAT_AMR_NB) {
+        if (mAudioEncoder != AUDIO_ENCODER_DEFAULT &&
+            mAudioEncoder != AUDIO_ENCODER_AMR_NB) {
+            LOGE("Invalid encoder %d used for AMRNB recording",
+                    mAudioEncoder);
+            return BAD_VALUE;
+        }
+    } else {  // mOutputFormat must be OUTPUT_FORMAT_AMR_WB
+        if (mAudioEncoder != AUDIO_ENCODER_AMR_WB) {
+            LOGE("Invlaid encoder %d used for AMRWB recording",
+                    mAudioEncoder);
+            return BAD_VALUE;
+        }
+    }
+
+    mWriter = new AMRWriter(mOutputFd);
+    status_t status = startRawAudioRecording();
+    if (status != OK) {
+        mWriter.clear();
+        mWriter = NULL;
+    }
+    return status;
+}
+
+status_t GonkRecorder::startRawAudioRecording() {
+    if (mAudioSource >= AUDIO_SOURCE_CNT) {
+        LOGE("Invalid audio source: %d", mAudioSource);
+        return BAD_VALUE;
+    }
+
+    status_t status = BAD_VALUE;
+    if (OK != (status = checkAudioEncoderCapabilities())) {
+        return status;
+    }
+
+    sp<MediaSource> audioEncoder = createAudioSource();
+    if (audioEncoder == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    CHECK(mWriter != 0);
+    mWriter->addSource(audioEncoder);
+
+    if (mMaxFileDurationUs != 0) {
+        mWriter->setMaxFileDuration(mMaxFileDurationUs);
+    }
+    if (mMaxFileSizeBytes != 0) {
+        mWriter->setMaxFileSize(mMaxFileSizeBytes);
+    }
+    mWriter->setListener(mListener);
+    mWriter->start();
+
+    return OK;
+}
+
+status_t GonkRecorder::startMPEG2TSRecording() {
+    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
+
+    sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
+
+    if (mAudioSource != AUDIO_SOURCE_CNT) {
+        if (mAudioEncoder != AUDIO_ENCODER_AAC) {
+            return ERROR_UNSUPPORTED;
+        }
+
+        status_t err = setupAudioEncoder(writer);
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    if (mVideoSource < VIDEO_SOURCE_LIST_END) {
+        if (mVideoEncoder != VIDEO_ENCODER_H264) {
+            return ERROR_UNSUPPORTED;
+        }
+
+        sp<MediaSource> mediaSource;
+        status_t err = setupMediaSource(&mediaSource);
+        if (err != OK) {
+            return err;
+        }
+
+        sp<MediaSource> encoder;
+        err = setupVideoEncoder(mediaSource, mVideoBitRate, &encoder);
+
+        if (err != OK) {
+            return err;
+        }
+
+        writer->addSource(encoder);
+    }
+
+    if (mMaxFileDurationUs != 0) {
+        writer->setMaxFileDuration(mMaxFileDurationUs);
+    }
+
+    if (mMaxFileSizeBytes != 0) {
+        writer->setMaxFileSize(mMaxFileSizeBytes);
+    }
+
+    mWriter = writer;
+
+    return mWriter->start();
+}
+
+void GonkRecorder::clipVideoFrameRate() {
+    LOGV("clipVideoFrameRate: encoder %d", mVideoEncoder);
+    int minFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
+                        "enc.vid.fps.min", mVideoEncoder);
+    int maxFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
+                        "enc.vid.fps.max", mVideoEncoder);
+    if (mFrameRate < minFrameRate && mFrameRate != -1) {
+        LOGW("Intended video encoding frame rate (%d fps) is too small"
+             " and will be set to (%d fps)", mFrameRate, minFrameRate);
+        mFrameRate = minFrameRate;
+    } else if (mFrameRate > maxFrameRate) {
+        LOGW("Intended video encoding frame rate (%d fps) is too large"
+             " and will be set to (%d fps)", mFrameRate, maxFrameRate);
+        mFrameRate = maxFrameRate;
+    }
+}
+
+void GonkRecorder::clipVideoBitRate() {
+    LOGV("clipVideoBitRate: encoder %d", mVideoEncoder);
+    int minBitRate = mEncoderProfiles->getVideoEncoderParamByName(
+                        "enc.vid.bps.min", mVideoEncoder);
+    int maxBitRate = mEncoderProfiles->getVideoEncoderParamByName(
+                        "enc.vid.bps.max", mVideoEncoder);
+    if (mVideoBitRate < minBitRate) {
+        LOGW("Intended video encoding bit rate (%d bps) is too small"
+             " and will be set to (%d bps)", mVideoBitRate, minBitRate);
+        mVideoBitRate = minBitRate;
+    } else if (mVideoBitRate > maxBitRate) {
+        LOGW("Intended video encoding bit rate (%d bps) is too large"
+             " and will be set to (%d bps)", mVideoBitRate, maxBitRate);
+        mVideoBitRate = maxBitRate;
+    }
+}
+
+void GonkRecorder::clipVideoFrameWidth() {
+    LOGV("clipVideoFrameWidth: encoder %d", mVideoEncoder);
+    int minFrameWidth = mEncoderProfiles->getVideoEncoderParamByName(
+                        "enc.vid.width.min", mVideoEncoder);
+    int maxFrameWidth = mEncoderProfiles->getVideoEncoderParamByName(
+                        "enc.vid.width.max", mVideoEncoder);
+    if (mVideoWidth < minFrameWidth) {
+        LOGW("Intended video encoding frame width (%d) is too small"
+             " and will be set to (%d)", mVideoWidth, minFrameWidth);
+        mVideoWidth = minFrameWidth;
+    } else if (mVideoWidth > maxFrameWidth) {
+        LOGW("Intended video encoding frame width (%d) is too large"
+             " and will be set to (%d)", mVideoWidth, maxFrameWidth);
+        mVideoWidth = maxFrameWidth;
+    }
+}
+
+status_t GonkRecorder::checkVideoEncoderCapabilities() {
+    // Don't clip for time lapse capture, as the encoder will have enough
+    // time to encode because of the slow capture rate of time lapse.
+    clipVideoBitRate();
+    clipVideoFrameRate();
+    clipVideoFrameWidth();
+    clipVideoFrameHeight();
+    setDefaultProfileIfNecessary();
+    return OK;
+}
+
+// Set to use AVC baseline profile if the encoding parameters matches
+// CAMCORDER_QUALITY_LOW profile; this is for the sake of MMS service.
+void GonkRecorder::setDefaultProfileIfNecessary() {
+    LOGV("setDefaultProfileIfNecessary");
+
+    camcorder_quality quality = CAMCORDER_QUALITY_LOW;
+
+    int64_t durationUs   = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "duration", mCameraId, quality) * 1000000LL;
+
+    int fileFormat       = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "file.format", mCameraId, quality);
+
+    int videoCodec       = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "vid.codec", mCameraId, quality);
+
+    int videoBitRate     = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "vid.bps", mCameraId, quality);
+
+    int videoFrameRate   = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "vid.fps", mCameraId, quality);
+
+    int videoFrameWidth  = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "vid.width", mCameraId, quality);
+
+    int videoFrameHeight = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "vid.height", mCameraId, quality);
+
+    int audioCodec       = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "aud.codec", mCameraId, quality);
+
+    int audioBitRate     = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "aud.bps", mCameraId, quality);
+
+    int audioSampleRate  = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "aud.hz", mCameraId, quality);
+
+    int audioChannels    = mEncoderProfiles->getCamcorderProfileParamByName(
+                                "aud.ch", mCameraId, quality);
+
+    if (durationUs == mMaxFileDurationUs &&
+        fileFormat == mOutputFormat &&
+        videoCodec == mVideoEncoder &&
+        videoBitRate == mVideoBitRate &&
+        videoFrameRate == mFrameRate &&
+        videoFrameWidth == mVideoWidth &&
+        videoFrameHeight == mVideoHeight &&
+        audioCodec == mAudioEncoder &&
+        audioBitRate == mAudioBitRate &&
+        audioSampleRate == mSampleRate &&
+        audioChannels == mAudioChannels) {
+        if (videoCodec == VIDEO_ENCODER_H264) {
+            LOGI("Force to use AVC baseline profile");
+            setParamVideoEncoderProfile(OMX_VIDEO_AVCProfileBaseline);
+        }
+    }
+}
+
+status_t GonkRecorder::checkAudioEncoderCapabilities() {
+    clipAudioBitRate();
+    clipAudioSampleRate();
+    clipNumberOfAudioChannels();
+    return OK;
+}
+
+void GonkRecorder::clipAudioBitRate() {
+    LOGV("clipAudioBitRate: encoder %d", mAudioEncoder);
+
+    int minAudioBitRate =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.bps.min", mAudioEncoder);
+    if (mAudioBitRate < minAudioBitRate) {
+        LOGW("Intended audio encoding bit rate (%d) is too small"
+            " and will be set to (%d)", mAudioBitRate, minAudioBitRate);
+        mAudioBitRate = minAudioBitRate;
+    }
+
+    int maxAudioBitRate =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.bps.max", mAudioEncoder);
+    if (mAudioBitRate > maxAudioBitRate) {
+        LOGW("Intended audio encoding bit rate (%d) is too large"
+            " and will be set to (%d)", mAudioBitRate, maxAudioBitRate);
+        mAudioBitRate = maxAudioBitRate;
+    }
+}
+
+void GonkRecorder::clipAudioSampleRate() {
+    LOGV("clipAudioSampleRate: encoder %d", mAudioEncoder);
+
+    int minSampleRate =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.hz.min", mAudioEncoder);
+    if (mSampleRate < minSampleRate) {
+        LOGW("Intended audio sample rate (%d) is too small"
+            " and will be set to (%d)", mSampleRate, minSampleRate);
+        mSampleRate = minSampleRate;
+    }
+
+    int maxSampleRate =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.hz.max", mAudioEncoder);
+    if (mSampleRate > maxSampleRate) {
+        LOGW("Intended audio sample rate (%d) is too large"
+            " and will be set to (%d)", mSampleRate, maxSampleRate);
+        mSampleRate = maxSampleRate;
+    }
+}
+
+void GonkRecorder::clipNumberOfAudioChannels() {
+    LOGV("clipNumberOfAudioChannels: encoder %d", mAudioEncoder);
+
+    int minChannels =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.ch.min", mAudioEncoder);
+    if (mAudioChannels < minChannels) {
+        LOGW("Intended number of audio channels (%d) is too small"
+            " and will be set to (%d)", mAudioChannels, minChannels);
+        mAudioChannels = minChannels;
+    }
+
+    int maxChannels =
+            mEncoderProfiles->getAudioEncoderParamByName(
+                "enc.aud.ch.max", mAudioEncoder);
+    if (mAudioChannels > maxChannels) {
+        LOGW("Intended number of audio channels (%d) is too large"
+            " and will be set to (%d)", mAudioChannels, maxChannels);
+        mAudioChannels = maxChannels;
+    }
+}
+
+void GonkRecorder::clipVideoFrameHeight() {
+    LOGV("clipVideoFrameHeight: encoder %d", mVideoEncoder);
+    int minFrameHeight = mEncoderProfiles->getVideoEncoderParamByName(
+                        "enc.vid.height.min", mVideoEncoder);
+    int maxFrameHeight = mEncoderProfiles->getVideoEncoderParamByName(
+                        "enc.vid.height.max", mVideoEncoder);
+    if (mVideoHeight < minFrameHeight) {
+        LOGW("Intended video encoding frame height (%d) is too small"
+             " and will be set to (%d)", mVideoHeight, minFrameHeight);
+        mVideoHeight = minFrameHeight;
+    } else if (mVideoHeight > maxFrameHeight) {
+        LOGW("Intended video encoding frame height (%d) is too large"
+             " and will be set to (%d)", mVideoHeight, maxFrameHeight);
+        mVideoHeight = maxFrameHeight;
+    }
+}
+
+// Set up the appropriate MediaSource depending on the chosen option
+status_t GonkRecorder::setupMediaSource(
+                      sp<MediaSource> *mediaSource) {
+    if (mVideoSource == VIDEO_SOURCE_DEFAULT
+            || mVideoSource == VIDEO_SOURCE_CAMERA) {
+        sp<GonkCameraSource> cameraSource;
+        status_t err = setupCameraSource(&cameraSource);
+        if (err != OK) {
+            return err;
+        }
+        *mediaSource = cameraSource;
+    } else if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
+        return BAD_VALUE;
+    } else {
+        return INVALID_OPERATION;
+    }
+    return OK;
+}
+
+status_t GonkRecorder::setupCameraSource(
+        sp<GonkCameraSource> *cameraSource) {
+    status_t err = OK;
+    if ((err = checkVideoEncoderCapabilities()) != OK) {
+        return err;
+    }
+    Size videoSize;
+    videoSize.width = mVideoWidth;
+    videoSize.height = mVideoHeight;
+    bool useMeta = true;
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("debug.camcorder.disablemeta", value, NULL) &&
+            atoi(value)) {
+      useMeta = false;
+    }
+
+    *cameraSource = GonkCameraSource::Create(
+                mCameraHandle, videoSize, mFrameRate, useMeta);
+    if (*cameraSource == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    if ((*cameraSource)->initCheck() != OK) {
+        (*cameraSource).clear();
+        *cameraSource = NULL;
+        return NO_INIT;
+    }
+
+    // When frame rate is not set, the actual frame rate will be set to
+    // the current frame rate being used.
+    if (mFrameRate == -1) {
+        int32_t frameRate = 0;
+        CHECK ((*cameraSource)->getFormat()->findInt32(
+                    kKeyFrameRate, &frameRate));
+        LOGI("Frame rate is not explicitly set. Use the current frame "
+             "rate (%d fps)", frameRate);
+        mFrameRate = frameRate;
+    }
+
+    CHECK(mFrameRate != -1);
+
+    mIsMetaDataStoredInVideoBuffers =
+        (*cameraSource)->isMetaDataStoredInVideoBuffers();
+
+    return OK;
+}
+
+status_t GonkRecorder::setupVideoEncoder(
+        sp<MediaSource> cameraSource,
+        int32_t videoBitRate,
+        sp<MediaSource> *source) {
+    source->clear();
+
+    sp<MetaData> enc_meta = new MetaData;
+    enc_meta->setInt32(kKeyBitRate, videoBitRate);
+    enc_meta->setInt32(kKeyFrameRate, mFrameRate);
+
+    switch (mVideoEncoder) {
+        case VIDEO_ENCODER_H263:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+            break;
+
+        case VIDEO_ENCODER_MPEG_4_SP:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+            break;
+
+        case VIDEO_ENCODER_H264:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+            break;
+
+        default:
+            CHECK(!"Should not be here, unsupported video encoding.");
+            break;
+    }
+
+    sp<MetaData> meta = cameraSource->getFormat();
+
+    int32_t width, height, stride, sliceHeight, colorFormat;
+    CHECK(meta->findInt32(kKeyWidth, &width));
+    CHECK(meta->findInt32(kKeyHeight, &height));
+    CHECK(meta->findInt32(kKeyStride, &stride));
+    CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+    CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+
+    enc_meta->setInt32(kKeyWidth, width);
+    enc_meta->setInt32(kKeyHeight, height);
+    enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
+    enc_meta->setInt32(kKeyStride, stride);
+    enc_meta->setInt32(kKeySliceHeight, sliceHeight);
+    enc_meta->setInt32(kKeyColorFormat, colorFormat);
+    if (mVideoTimeScale > 0) {
+        enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
+    }
+
+    /*
+     * The encoder profile could be passed in from the app as a parameter.
+     * For the time being, it is set via a system property from the shell.
+     */
+
+    char value[PROPERTY_VALUE_MAX];
+    bool customProfile = false;
+
+    if (property_get("encoder.video.profile", value, NULL) > 0) {
+        customProfile = true;
+    }
+
+    if (customProfile) {
+        switch ( mVideoEncoder ) {
+        case VIDEO_ENCODER_H264:
+            if (strncmp("base", value, 4) == 0) {
+                mVideoEncoderProfile = OMX_VIDEO_AVCProfileBaseline;
+                LOGI("H264 Baseline Profile");
+            }
+            else if (strncmp("main", value, 4) == 0) {
+                mVideoEncoderProfile = OMX_VIDEO_AVCProfileMain;
+                LOGI("H264 Main Profile");
+            }
+            else if (strncmp("high", value, 4) == 0) {
+                mVideoEncoderProfile = OMX_VIDEO_AVCProfileHigh;
+                LOGI("H264 High Profile");
+            }
+            else {
+               LOGW("Unsupported H264 Profile");
+            }
+            break;
+        case VIDEO_ENCODER_MPEG_4_SP:
+            if (strncmp("simple", value, 5) == 0 ) {
+                mVideoEncoderProfile = OMX_VIDEO_MPEG4ProfileSimple;
+                LOGI("MPEG4 Simple profile");
+            }
+            else if (strncmp("asp", value, 3) == 0 ) {
+                mVideoEncoderProfile = OMX_VIDEO_MPEG4ProfileAdvancedSimple;
+                LOGI("MPEG4 Advanced Simple Profile");
+            }
+            else {
+                LOGW("Unsupported MPEG4 Profile");
+            }
+            break;
+        default:
+            LOGW("No custom profile support for other codecs");
+            break;
+        }
+    }
+
+    if (mVideoEncoderProfile != -1) {
+        enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
+    }
+    if (mVideoEncoderLevel != -1) {
+        enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+    }
+
+    uint32_t encoder_flags = 0;
+    if (mIsMetaDataStoredInVideoBuffers) {
+        LOGW("Camera source supports metadata mode, create OMXCodec for metadata");
+        encoder_flags |= OMXCodec::kHardwareCodecsOnly;
+        encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+        encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
+    }
+
+    sp<MediaSource> encoder = OMXCodec::Create(
+            GetOMX(),
+            enc_meta,
+            true /* createEncoder */, cameraSource,
+            NULL, encoder_flags);
+    if (encoder == NULL) {
+        LOGW("Failed to create the encoder");
+        // If the encoder could not be created, we need to
+        // release the camera source because of the camera's
+        // lock and unlock mechanism.
+        cameraSource->stop();
+        return UNKNOWN_ERROR;
+    }
+
+    *source = encoder;
+
+    return OK;
+}
+
+status_t GonkRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
+    status_t status = BAD_VALUE;
+    if (OK != (status = checkAudioEncoderCapabilities())) {
+        return status;
+    }
+
+    switch(mAudioEncoder) {
+        case AUDIO_ENCODER_AMR_NB:
+        case AUDIO_ENCODER_AMR_WB:
+        case AUDIO_ENCODER_AAC:
+            break;
+
+        default:
+            LOGE("Unsupported audio encoder: %d", mAudioEncoder);
+            return UNKNOWN_ERROR;
+    }
+
+    sp<MediaSource> audioEncoder = createAudioSource();
+    if (audioEncoder == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    writer->addSource(audioEncoder);
+    return OK;
+}
+
+status_t GonkRecorder::setupMPEG4Recording(
+        int outputFd,
+        int32_t videoWidth, int32_t videoHeight,
+        int32_t videoBitRate,
+        int32_t *totalBitRate,
+        sp<MediaWriter> *mediaWriter) {
+    mediaWriter->clear();
+    *totalBitRate = 0;
+    status_t err = OK;
+    sp<MediaWriter> writer = new MPEG4Writer(outputFd);
+
+    if (mVideoSource < VIDEO_SOURCE_LIST_END) {
+
+        sp<MediaSource> mediaSource;
+        err = setupMediaSource(&mediaSource);
+        if (err != OK) {
+            return err;
+        }
+
+        sp<MediaSource> encoder;
+        err = setupVideoEncoder(mediaSource, videoBitRate, &encoder);
+        if (err != OK) {
+            return err;
+        }
+
+        writer->addSource(encoder);
+        *totalBitRate += videoBitRate;
+    }
+
+    // The audio source is added at the end, if it exists.
+    // This helps make sure that the "recording" sound is suppressed in
+    // files recorded by camcorder applications.
+    if (mAudioSource != AUDIO_SOURCE_CNT) {
+        err = setupAudioEncoder(writer);
+        if (err != OK) return err;
+        *totalBitRate += mAudioBitRate;
+    }
+
+    if (mInterleaveDurationUs > 0) {
+        reinterpret_cast<MPEG4Writer *>(writer.get())->
+            setInterleaveDuration(mInterleaveDurationUs);
+    }
+    if (mLongitudex10000 > -3600000 && mLatitudex10000 > -3600000) {
+        reinterpret_cast<MPEG4Writer *>(writer.get())->
+            setGeoData(mLatitudex10000, mLongitudex10000);
+    }
+    if (mMaxFileDurationUs != 0) {
+        writer->setMaxFileDuration(mMaxFileDurationUs);
+    }
+    if (mMaxFileSizeBytes != 0) {
+        writer->setMaxFileSize(mMaxFileSizeBytes);
+    }
+
+    mStartTimeOffsetMs = mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
+    if (mStartTimeOffsetMs > 0) {
+        reinterpret_cast<MPEG4Writer *>(writer.get())->
+            setStartTimeOffsetMs(mStartTimeOffsetMs);
+    }
+
+    writer->setListener(mListener);
+    *mediaWriter = writer;
+    return OK;
+}
+
+void GonkRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+        sp<MetaData> *meta) {
+    (*meta)->setInt64(kKeyTime, startTimeUs);
+    (*meta)->setInt32(kKeyFileType, mOutputFormat);
+    (*meta)->setInt32(kKeyBitRate, totalBitRate);
+    (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
+    if (mMovieTimeScale > 0) {
+        (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
+    }
+    if (mTrackEveryTimeDurationUs > 0) {
+        (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("debug.camcorder.rotation", value, 0) > 0 && atoi(value) >= 0) {
+        mRotationDegrees = atoi(value);
+        LOGI("Setting rotation to %d", mRotationDegrees );
+    }
+
+    if (mRotationDegrees != 0) {
+        (*meta)->setInt32(kKeyRotation, mRotationDegrees);
+    }
+}
+
+status_t GonkRecorder::startMPEG4Recording() {
+    int32_t totalBitRate;
+    status_t err = setupMPEG4Recording(
+            mOutputFd, mVideoWidth, mVideoHeight,
+            mVideoBitRate, &totalBitRate, &mWriter);
+    if (err != OK) {
+        return err;
+    }
+
+    // systemTime() doesn't return the correct time here because
+    // HAVE_POSIX_CLOCKS is not defined for utils/Timers.cpp, so
+    // call clock_gettime() directly and convert from ns to us.
+#include <time.h>
+    struct timespec t;
+    clock_gettime(CLOCK_MONOTONIC, &t);
+    int64_t startTimeUs = int64_t(t.tv_sec)*1000000000LL + t.tv_nsec;
+    startTimeUs = startTimeUs / 1000;
+    sp<MetaData> meta = new MetaData;
+    setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
+
+    err = mWriter->start(meta.get());
+    if (err != OK) {
+        return err;
+    }
+
+    return OK;
+}
+
+status_t GonkRecorder::pause() {
+    LOGV("pause");
+    if (mWriter == NULL) {
+        return UNKNOWN_ERROR;
+    }
+    mWriter->pause();
+
+    if (mStarted) {
+        mStarted = false;
+    }
+
+
+    return OK;
+}
+
+status_t GonkRecorder::stop() {
+    LOGV("stop");
+    status_t err = OK;
+
+    if (mWriter != NULL) {
+        err = mWriter->stop();
+        mWriter.clear();
+    }
+
+    if (mOutputFd >= 0) {
+        ::close(mOutputFd);
+        mOutputFd = -1;
+    }
+
+    if (mStarted) {
+        mStarted = false;
+    }
+
+
+    return err;
+}
+
+status_t GonkRecorder::close() {
+    LOGV("close");
+    stop();
+
+    return OK;
+}
+
+status_t GonkRecorder::reset() {
+    LOGV("reset");
+    stop();
+
+    // No audio or video source by default
+    mAudioSource = AUDIO_SOURCE_CNT;
+    mVideoSource = VIDEO_SOURCE_LIST_END;
+
+    // Default parameters
+    mOutputFormat  = OUTPUT_FORMAT_THREE_GPP;
+    mAudioEncoder  = AUDIO_ENCODER_AMR_NB;
+    mVideoEncoder  = VIDEO_ENCODER_H263;
+    mVideoWidth    = 176;
+    mVideoHeight   = 144;
+    mFrameRate     = -1;
+    mVideoBitRate  = 192000;
+    mSampleRate    = 8000;
+    mAudioChannels = 1;
+    mAudioBitRate  = 12200;
+    mInterleaveDurationUs = 0;
+    mIFramesIntervalSec = 2;
+    mAudioSourceNode = 0;
+    mUse64BitFileOffset = false;
+    mMovieTimeScale  = -1;
+    mAudioTimeScale  = -1;
+    mVideoTimeScale  = -1;
+    mCameraId        = 0;
+    mStartTimeOffsetMs = -1;
+    mVideoEncoderProfile = -1;
+    mVideoEncoderLevel   = -1;
+    mMaxFileDurationUs = 0;
+    mMaxFileSizeBytes = 0;
+    mTrackEveryTimeDurationUs = 0;
+    mIsMetaDataStoredInVideoBuffers = false;
+    mEncoderProfiles = MediaProfiles::getInstance();
+    mRotationDegrees = 0;
+    mLatitudex10000 = -3600000;
+    mLongitudex10000 = -3600000;
+
+    mOutputFd = -1;
+    mCameraHandle = -1;
+    // TODO: We may need to register a listener eventually, if
+    // someone is interested in recorder events. For now, default
+    // to no listener registered.
+    mListener = NULL;
+
+    // Disable audio encoding if the debug property is set.
+    char value[PROPERTY_VALUE_MAX];
+    property_get("camcorder.debug.disableaudio", value, "0");
+    mDisableAudio = atoi(value) != 0;
+
+    return OK;
+}
+
+status_t GonkRecorder::getMaxAmplitude(int *max) {
+    LOGV("getMaxAmplitude");
+
+    if (max == NULL) {
+        LOGE("Null pointer argument");
+        return BAD_VALUE;
+    }
+
+    if (mAudioSourceNode != 0) {
+        *max = mAudioSourceNode->getMaxAmplitude();
+    } else {
+        *max = 0;
+    }
+
+    return OK;
+}
+
+status_t GonkRecorder::dump(
+        int fd, const Vector<String16>& args) const {
+    LOGV("dump");
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    if (mWriter != 0) {
+        mWriter->dump(fd, args);
+    } else {
+        snprintf(buffer, SIZE, "   No file writer\n");
+        result.append(buffer);
+    }
+    snprintf(buffer, SIZE, "   Recorder: %p\n", this);
+    snprintf(buffer, SIZE, "   Output file (fd %d):\n", mOutputFd);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     File format: %d\n", mOutputFormat);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Max file size (bytes): %lld\n", mMaxFileSizeBytes);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Max file duration (us): %lld\n", mMaxFileDurationUs);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     File offset length (bits): %d\n", mUse64BitFileOffset? 64: 32);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Interleave duration (us): %d\n", mInterleaveDurationUs);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Progress notification: %lld us\n", mTrackEveryTimeDurationUs);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "   Audio\n");
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Source: %d\n", mAudioSource);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Encoder: %d\n", mAudioEncoder);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Bit rate (bps): %d\n", mAudioBitRate);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Sampling rate (hz): %d\n", mSampleRate);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Number of channels: %d\n", mAudioChannels);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Max amplitude: %d\n", mAudioSourceNode == 0? 0: mAudioSourceNode->getMaxAmplitude());
+    result.append(buffer);
+    snprintf(buffer, SIZE, "   Video\n");
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Source: %d\n", mVideoSource);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Camera Id: %d\n", mCameraId);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Camera Handle: %d\n", mCameraHandle);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Start time offset (ms): %d\n", mStartTimeOffsetMs);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Encoder: %d\n", mVideoEncoder);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Encoder profile: %d\n", mVideoEncoderProfile);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Encoder level: %d\n", mVideoEncoderLevel);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     I frames interval (s): %d\n", mIFramesIntervalSec);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Frame rate (fps): %d\n", mFrameRate);
+    result.append(buffer);
+    snprintf(buffer, SIZE, "     Bit rate (bps): %d\n", mVideoBitRate);
+    result.append(buffer);
+    ::write(fd, result.string(), result.size());
+    return OK;
+}
+
+status_t GonkRecorder::setCameraHandle(int32_t handle) {
+  if (handle < 0) {
+    return BAD_VALUE;
+  }
+  mCameraHandle = handle;
+  return OK;
+}
+
+}  // namespace android
new file mode 100644
--- /dev/null
+++ b/dom/camera/GonkRecorder.h
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GONK_RECORDER_H_
+
+#define GONK_RECORDER_H_
+
+#include <media/mediarecorder.h>
+#include <camera/CameraParameters.h>
+#include <utils/String8.h>
+
+#include <system/audio.h>
+
+namespace android {
+
+class GonkCameraSource;
+struct MediaSource;
+struct MediaWriter;
+class MetaData;
+struct AudioSource;
+class MediaProfiles;
+
+struct GonkRecorder {
+    GonkRecorder();
+    virtual ~GonkRecorder();
+
+    virtual status_t init();
+    virtual status_t setAudioSource(audio_source_t as);
+    virtual status_t setVideoSource(video_source vs);
+    virtual status_t setOutputFormat(output_format of);
+    virtual status_t setAudioEncoder(audio_encoder ae);
+    virtual status_t setVideoEncoder(video_encoder ve);
+    virtual status_t setVideoSize(int width, int height);
+    virtual status_t setVideoFrameRate(int frames_per_second);
+    virtual status_t setOutputFile(const char *path);
+    virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+    virtual status_t setParameters(const String8& params);
+    virtual status_t setCameraHandle(int32_t handle);
+    virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
+    virtual status_t prepare();
+    virtual status_t start();
+    virtual status_t pause();
+    virtual status_t stop();
+    virtual status_t close();
+    virtual status_t reset();
+    virtual status_t getMaxAmplitude(int *max);
+    virtual status_t dump(int fd, const Vector<String16>& args) const;
+    // Note: the SurfaceMediaSource query interface from StagefrightRecorder is not ported here.
+
+private:
+    sp<IMediaRecorderClient> mListener;
+    sp<MediaWriter> mWriter;
+    int mOutputFd;
+    sp<AudioSource> mAudioSourceNode;
+
+    audio_source_t mAudioSource;
+    video_source mVideoSource;
+    output_format mOutputFormat;
+    audio_encoder mAudioEncoder;
+    video_encoder mVideoEncoder;
+    bool mUse64BitFileOffset;
+    int32_t mVideoWidth, mVideoHeight;
+    int32_t mFrameRate;
+    int32_t mVideoBitRate;
+    int32_t mAudioBitRate;
+    int32_t mAudioChannels;
+    int32_t mSampleRate;
+    int32_t mInterleaveDurationUs;
+    int32_t mIFramesIntervalSec;
+    int32_t mCameraId;
+    int32_t mVideoEncoderProfile;
+    int32_t mVideoEncoderLevel;
+    int32_t mMovieTimeScale;
+    int32_t mVideoTimeScale;
+    int32_t mAudioTimeScale;
+    int64_t mMaxFileSizeBytes;
+    int64_t mMaxFileDurationUs;
+    int64_t mTrackEveryTimeDurationUs;
+    int32_t mRotationDegrees;  // Clockwise
+    int32_t mLatitudex10000;
+    int32_t mLongitudex10000;
+    int32_t mStartTimeOffsetMs;
+
+    String8 mParams;
+
+    bool mIsMetaDataStoredInVideoBuffers;
+    MediaProfiles *mEncoderProfiles;
+
+    bool mStarted;
+    // mDisableAudio is a debugging aid: it is set in reset() from
+    // the "camcorder.debug.disableaudio" system property. When the
+    // property is non-zero, audio capture is disabled for the
+    // recording.
+    bool mDisableAudio;
+    int32_t mCameraHandle;
+
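+    // MPEG4 recording pipeline (see GonkRecorder.cpp): startMPEG4Recording()
+    // calls setupMPEG4Recording(), which builds the camera source via
+    // setupMediaSource()/setupCameraSource(), wraps it in setupVideoEncoder(),
+    // and optionally adds an audio track via setupAudioEncoder().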
+    status_t setupMPEG4Recording(
+        int outputFd,
+        int32_t videoWidth, int32_t videoHeight,
+        int32_t videoBitRate,
+        int32_t *totalBitRate,
+        sp<MediaWriter> *mediaWriter);
+    void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+        sp<MetaData> *meta);
+    status_t startMPEG4Recording();
+    status_t startAMRRecording();
+    status_t startRawAudioRecording();
+    status_t startMPEG2TSRecording();
+    sp<MediaSource> createAudioSource();
+    status_t checkVideoEncoderCapabilities();
+    status_t checkAudioEncoderCapabilities();
+    // Generic MediaSource set-up; returns the appropriate
+    // source depending on the video source type (only the
+    // camera source is supported here).
+    status_t setupMediaSource(sp<MediaSource> *mediaSource);
+    status_t setupCameraSource(sp<GonkCameraSource> *cameraSource);
+    // (The SurfaceMediaSource set-up from StagefrightRecorder is not ported.)
+
+    status_t setupAudioEncoder(const sp<MediaWriter>& writer);
+    status_t setupVideoEncoder(
+            sp<MediaSource> cameraSource,
+            int32_t videoBitRate,
+            sp<MediaSource> *source);
+
+    // Encoding parameter handling utilities
+    status_t setParameter(const String8 &key, const String8 &value);
+    status_t setParamAudioEncodingBitRate(int32_t bitRate);
+    status_t setParamAudioNumberOfChannels(int32_t channels);
+    status_t setParamAudioSamplingRate(int32_t sampleRate);
+    status_t setParamAudioTimeScale(int32_t timeScale);
+    status_t setParamVideoEncodingBitRate(int32_t bitRate);
+    status_t setParamVideoIFramesInterval(int32_t seconds);
+    status_t setParamVideoEncoderProfile(int32_t profile);
+    status_t setParamVideoEncoderLevel(int32_t level);
+    status_t setParamVideoCameraId(int32_t cameraId);
+    status_t setParamVideoTimeScale(int32_t timeScale);
+    status_t setParamVideoRotation(int32_t degrees);
+    status_t setParamTrackTimeStatus(int64_t timeDurationUs);
+    status_t setParamInterleaveDuration(int32_t durationUs);
+    status_t setParam64BitFileOffset(bool use64BitFileOffset);
+    status_t setParamMaxFileDurationUs(int64_t timeUs);
+    status_t setParamMaxFileSizeBytes(int64_t bytes);
+    status_t setParamMovieTimeScale(int32_t timeScale);
+    status_t setParamGeoDataLongitude(int64_t longitudex10000);
+    status_t setParamGeoDataLatitude(int64_t latitudex10000);
+    void clipVideoBitRate();
+    void clipVideoFrameRate();
+    void clipVideoFrameWidth();
+    void clipVideoFrameHeight();
+    void clipAudioBitRate();
+    void clipAudioSampleRate();
+    void clipNumberOfAudioChannels();
+    void setDefaultProfileIfNecessary();
+
+    GonkRecorder(const GonkRecorder &);
+    GonkRecorder &operator=(const GonkRecorder &);
+};
+
+}  // namespace android
+
+#endif  // GONK_RECORDER_H_
--- a/dom/camera/ICameraControl.h
+++ b/dom/camera/ICameraControl.h
@@ -1,16 +1,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef DOM_CAMERA_ICAMERACONTROL_H
 #define DOM_CAMERA_ICAMERACONTROL_H
 
 #include "jsapi.h"
+#include "nsIDOMDeviceStorage.h"
 #include "nsIDOMCameraManager.h"
 #include "DictionaryHelpers.h"
 #include "CameraCommon.h"
 
 namespace mozilla {
 
 using namespace dom;
 
@@ -21,18 +22,19 @@ class ICameraControl
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ICameraControl)
 
   virtual nsresult GetPreviewStream(CameraSize aSize, nsICameraPreviewStreamCallback* onSuccess, nsICameraErrorCallback* onError) = 0;
   virtual nsresult StartPreview(DOMCameraPreview* aDOMPreview) = 0;
   virtual void StopPreview() = 0;
   virtual nsresult AutoFocus(nsICameraAutoFocusCallback* onSuccess, nsICameraErrorCallback* onError) = 0;
   virtual nsresult TakePicture(CameraSize aSize, int32_t aRotation, const nsAString& aFileFormat, CameraPosition aPosition, nsICameraTakePictureCallback* onSuccess, nsICameraErrorCallback* onError) = 0;
-  virtual nsresult StartRecording(CameraSize aSize, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError) = 0;
+  virtual nsresult StartRecording(nsIDOMDeviceStorage* aStorageArea, const nsAString& aFilename, nsICameraStartRecordingCallback* onSuccess, nsICameraErrorCallback* onError) = 0;
   virtual nsresult StopRecording() = 0;
+  virtual nsresult GetPreviewStreamVideoMode(CameraRecordingOptions* aOptions, nsICameraPreviewStreamCallback* onSuccess, nsICameraErrorCallback* onError) = 0;
 
   virtual nsresult Set(uint32_t aKey, const nsAString& aValue) = 0;
   virtual nsresult Get(uint32_t aKey, nsAString& aValue) = 0;
   virtual nsresult Set(uint32_t aKey, double aValue) = 0;
   virtual nsresult Get(uint32_t aKey, double* aValue) = 0;
   virtual nsresult Set(JSContext* aCx, uint32_t aKey, const JS::Value& aValue, uint32_t aLimit) = 0;
   virtual nsresult Get(JSContext* aCx, uint32_t aKey, JS::Value* aValue) = 0;
   virtual nsresult SetFocusAreas(JSContext* aCx, const JS::Value& aValue) = 0;
--- a/dom/camera/Makefile.in
+++ b/dom/camera/Makefile.in
@@ -27,16 +27,19 @@ CPPSRCS = \
   $(NULL)
 
 ifeq ($(MOZ_B2G_CAMERA),1)
 CPPSRCS += \
   GonkCameraManager.cpp \
   GonkCameraControl.cpp \
   GonkCameraHwMgr.cpp \
   GonkNativeWindow.cpp \
+  GonkRecorder.cpp \
+  GonkCameraSource.cpp \
+  AudioParameter.cpp \
   $(NULL)
 else ifeq (gonk,$(MOZ_WIDGET_TOOLKIT))
 CPPSRCS += \
   FallbackCameraManager.cpp \
   FallbackCameraControl.cpp \
   GonkNativeWindow.cpp \
   $(NULL)
 else
new file mode 100644
--- /dev/null
+++ b/dom/camera/README
@@ -0,0 +1,40 @@
+This README details where some of the camcorder source files were derived from, and how to apply the provided patch file to get the updated files for B2G.
+---------------------------------
+
+The following B2G files were derived from an Android ics_chocolate build; the corresponding locations of the original source files are shown below each one:
+
+GonkRecorder.cpp:
+https://www.codeaurora.org/gitweb/quic/la/?p=platform/frameworks/base.git;a=blob;f=media/libmediaplayerservice/StagefrightRecorder.cpp;hb=ef1672482a9c2b88d8017927df68144fee42626c
+
+GonkRecorder.h:
+https://www.codeaurora.org/gitweb/quic/la/?p=platform/frameworks/base.git;a=blob;f=media/libmediaplayerservice/StagefrightRecorder.h;hb=e3682213bcd3fe43b059e00f0fe4dbebc3f3c35d
+
+GonkCameraSource.cpp:
+https://www.codeaurora.org/gitweb/quic/la/?p=platform/frameworks/base.git;a=blob;f=media/libstagefright/CameraSource.cpp;hb=7fa677babfee9c241a131b22c9c1c5ab512ef2d2
+
+GonkCameraSource.h:
+https://www.codeaurora.org/gitweb/quic/la/?p=platform/frameworks/base.git;a=blob;f=include/media/stagefright/CameraSource.h;hb=96af14d9b013496accf40a85a66fefcba3ac0111
+
+AudioParameter.cpp:
+https://www.codeaurora.org/gitweb/quic/la/?p=platform/frameworks/base.git;a=blob;f=media/libmedia/AudioParameter.cpp;hb=4dc22e77cfd2a1c3671e5646ee87c5e4c15596a0
+
+GonkCameraListener.h:
+https://www.codeaurora.org/gitweb/quic/la/?p=platform/frameworks/base.git;a=blob;f=include/camera/Camera.h;hb=796f35e408d9dca386f90d8fbde80471ac011fa6
+
+Quite a few changes were made to the sources listed above to support the camcorder on the B2G platform.
+update.patch captures those changes on top of the original files.
+The update.sh shell script copies the files from an Android tree and applies the patch to produce the updated files for B2G, as sketched below.
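+
+As a rough sketch (not the literal script), update.sh does something like the
+following; $ANDROID below is only illustrative and stands for the root of an
+android source tree, with the script run from dom/camera/:
+
+  cp $ANDROID/frameworks/base/media/libmediaplayerservice/StagefrightRecorder.cpp GonkRecorder.cpp
+  cp $ANDROID/frameworks/base/media/libmediaplayerservice/StagefrightRecorder.h GonkRecorder.h
+  cp $ANDROID/frameworks/base/media/libstagefright/CameraSource.cpp GonkCameraSource.cpp
+  cp $ANDROID/frameworks/base/include/media/stagefright/CameraSource.h GonkCameraSource.h
+  cp $ANDROID/frameworks/base/media/libmedia/AudioParameter.cpp AudioParameter.cpp
+  cp $ANDROID/frameworks/base/include/camera/Camera.h GonkCameraListener.h
+  patch -p1 < update.patch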
+
+
--- a/dom/camera/nsIDOMCameraManager.idl
+++ b/dom/camera/nsIDOMCameraManager.idl
@@ -1,15 +1,16 @@
 #include "domstubs.idl"
 
 #include "nsIDOMMediaStream.idl"
 #include "nsIDOMDOMRequest.idl"
 
 
 interface nsIDOMBlob;
+interface nsIDOMDeviceStorage;
 
 /* Used to set the dimensions of a captured picture,
    a preview stream, a video capture stream, etc. */
 dictionary CameraSize {
     unsigned long width;
     unsigned long height;
 };
 
@@ -103,20 +104,18 @@ interface nsICameraCapabilities : nsISup
     readonly attribute jsval        zoomRatios;
 
     /* an array of objects with 'height' and 'width' properties
        supported for video recording */
     [implicit_jscontext]
     readonly attribute jsval        videoSizes;
 };
 
-/*
-    These properties only affect the captured image;
-    invalid property settings are ignored.
-*/
+/* These properties only affect the captured image;
+   invalid property settings are ignored. */
 dictionary CameraPictureOptions
 {
     /* an object with a combination of 'height' and 'width' properties
        chosen from nsICameraCapabilities.pictureSizes */
     jsval     pictureSize;
 
     /* one of the file formats chosen from
        nsICameraCapabilities.fileFormats */
@@ -142,16 +141,24 @@ dictionary CameraPictureOptions
         available/desired.
 
         'altitude' is in metres; 'timestamp' is UTC, in seconds from
         January 1, 1970.
     */
     jsval     position;
 };
 
+/* These properties affect video recording. */
+dictionary CameraRecordingOptions
+{
+    long width;
+    long height;
+    long rotation;
+};
+
 [scriptable, function, uuid(0444a687-4bc9-462c-8246-5423f0fe46a4)]
 interface nsICameraPreviewStreamCallback : nsISupports
 {
     void handleEvent(in nsIDOMMediaStream stream);
 };
 
 [scriptable, function, uuid(6baa4ac7-9c25-4c48-9bb0-5193b38b9b0a)]
 interface nsICameraAutoFocusCallback : nsISupports
@@ -160,20 +167,20 @@ interface nsICameraAutoFocusCallback : n
 };
 
 [scriptable, function, uuid(17af779e-cb6f-4ca5-890c-06468ff82e4f)]
 interface nsICameraTakePictureCallback : nsISupports
 {
     void handleEvent(in nsIDOMBlob picture);
 };
 
-[scriptable, function, uuid(ac43f123-529c-48d3-84dd-ad206b7aca9b)]
+[scriptable, function, uuid(89a762f8-581b-410a-ad86-e2bd2113ad82)]
 interface nsICameraStartRecordingCallback : nsISupports
 {
-    void handleEvent(in nsIDOMMediaStream stream);
+    void handleEvent();
 };
 
 [scriptable, function, uuid(fb80db71-e315-42f0-9ea9-dd3dd312ed70)]
 interface nsICameraShutterCallback : nsISupports
 {
     void handleEvent();
 };
 
@@ -182,17 +189,17 @@ interface nsICameraErrorCallback : nsISu
 {
     void handleEvent(in DOMString error);
 };
 
 /*
     attributes here affect the preview, any pictures taken, and/or
     any video recorded by the camera.
 */
-[scriptable, uuid(b8949e5c-55b0-49dd-99a9-68d11342915a)]
+[scriptable, uuid(469e0462-59e4-4ed5-afa9-aecd1256ee30)]
 interface nsICameraControl : nsISupports
 {
     readonly attribute nsICameraCapabilities capabilities;
 
     /* one of the vales chosen from capabilities.effects;
        default is "none" */
     attribute DOMString         effect;
 
@@ -285,25 +292,31 @@ interface nsICameraControl : nsISupports
        if the camera supports it, this may be invoked while the camera is
        already recording video.
 
        invoking this function will stop the preview stream, which must be
        manually restarted (e.g. by calling .play() on it). */
     [implicit_jscontext]
     void takePicture(in jsval aOptions, in nsICameraTakePictureCallback onSuccess, [optional] in nsICameraErrorCallback onError);
 
-    /* start recording video; 'aOptions' define the frame size of to
-       capture, chosen from capabilities.videoSizes, e.g.:
+    /* get a media stream to be used as a camera viewfinder in video mode;
+       'aOptions' defines the frame size of the video capture, chosen from capabilities.videoSizes, e.g.:
         {
             width: 640,
-            height: 480
+            height: 480,
+            rotation: 90
         }
     */
     [implicit_jscontext]
-    void startRecording(in jsval aOptions, in nsICameraStartRecordingCallback onSuccess, [optional] in nsICameraErrorCallback onError);
+    void getPreviewStreamVideoMode(in jsval aOptions, in nsICameraPreviewStreamCallback onSuccess, [optional] in nsICameraErrorCallback onError);
+
+    /* start recording video to 'filename' in the given
+       'storageArea' device storage object. */
+    [implicit_jscontext]
+    void startRecording(in nsIDOMDeviceStorage storageArea, in DOMString filename, in nsICameraStartRecordingCallback onSuccess, [optional] in nsICameraErrorCallback onError);
 
     /* stop precording video. */
     void stopRecording();
 
     /* get a media stream to be used as a camera viewfinder; the options
        define the desired frame size of the preview, chosen from
        capabilities.previewSizes, e.g.:
         {
new file mode 100644
--- /dev/null
+++ b/dom/camera/update.patch
@@ -0,0 +1,2296 @@
+diff --git a/GonkCameraListener.h b/GonkCameraListener.h
+index 67eeef3..243264c 100644
+--- a/GonkCameraListener.h
++++ b/GonkCameraListener.h
+@@ -14,49 +14,16 @@
+  * limitations under the License.
+  */
+ 
+-#ifndef ANDROID_HARDWARE_CAMERA_H
+-#define ANDROID_HARDWARE_CAMERA_H
++#ifndef GONK_CAMERA_LISTENER_H
++#define GONK_CAMERA_LISTENER_H
+ 
+ #include <utils/Timers.h>
+-#include <gui/ISurfaceTexture.h>
+-#include <system/camera.h>
+-#include <camera/ICameraClient.h>
+-#include <camera/ICameraRecordingProxy.h>
+-#include <camera/ICameraRecordingProxyListener.h>
++#include "libcameraservice/CameraHardwareInterface.h"
+ 
+ namespace android {
+ 
+-struct CameraInfo {
+-    /**
+-     * The direction that the camera faces to. It should be CAMERA_FACING_BACK
+-     * or CAMERA_FACING_FRONT.
+-     */
+-    int facing;
+-
+-    /**
+-     * The orientation of the camera image. The value is the angle that the
+-     * camera image needs to be rotated clockwise so it shows correctly on the
+-     * display in its natural orientation. It should be 0, 90, 180, or 270.
+-     *
+-     * For example, suppose a device has a naturally tall screen. The
+-     * back-facing camera sensor is mounted in landscape. You are looking at
+-     * the screen. If the top side of the camera sensor is aligned with the
+-     * right edge of the screen in natural orientation, the value should be
+-     * 90. If the top side of a front-facing camera sensor is aligned with the
+-     * right of the screen, the value should be 270.
+-     */
+-    int orientation;
+-    int mode;
+-};
+-
+-class ICameraService;
+-class ICamera;
+-class Surface;
+-class Mutex;
+-class String8;
+-
+ // ref-counted object for callbacks
+-class CameraListener: virtual public RefBase
++class GonkCameraListener: virtual public RefBase
+ {
+ public:
+     virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0;
+@@ -65,133 +32,6 @@ public:
+     virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) = 0;
+ };
+ 
+-class Camera : public BnCameraClient, public IBinder::DeathRecipient
+-{
+-public:
+-            // construct a camera client from an existing remote
+-    static  sp<Camera>  create(const sp<ICamera>& camera);
+-    static  int32_t     getNumberOfCameras();
+-    static  status_t    getCameraInfo(int cameraId,
+-                                      struct CameraInfo* cameraInfo);
+-    static  sp<Camera>  connect(int cameraId);
+-            virtual     ~Camera();
+-            void        init();
+-
+-            status_t    reconnect();
+-            void        disconnect();
+-            status_t    lock();
+-            status_t    unlock();
+-
+-            status_t    getStatus() { return mStatus; }
+-
+-            // pass the buffered Surface to the camera service
+-            status_t    setPreviewDisplay(const sp<Surface>& surface);
+-
+-            // pass the buffered ISurfaceTexture to the camera service
+-            status_t    setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);
+-
+-            // start preview mode, must call setPreviewDisplay first
+-            status_t    startPreview();
+-
+-            // stop preview mode
+-            void        stopPreview();
+-
+-            // get preview state
+-            bool        previewEnabled();
+-
+-            // start recording mode, must call setPreviewDisplay first
+-            status_t    startRecording();
+-
+-            // stop recording mode
+-            void        stopRecording();
+-
+-            // get recording state
+-            bool        recordingEnabled();
+-
+-            // release a recording frame
+-            void        releaseRecordingFrame(const sp<IMemory>& mem);
+-
+-            // autoFocus - status returned from callback
+-            status_t    autoFocus();
+-
+-            // cancel auto focus
+-            status_t    cancelAutoFocus();
+-
+-            // take a picture - picture returned from callback
+-            status_t    takePicture(int msgType);
+-
+-            // set preview/capture parameters - key/value pairs
+-            status_t    setParameters(const String8& params);
+-
+-            // get preview/capture parameters - key/value pairs
+-            String8     getParameters() const;
+-
+-            // send command to camera driver
+-            status_t    sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+-
+-            // tell camera hal to store meta data or real YUV in video buffers.
+-            status_t    storeMetaDataInBuffers(bool enabled);
+-
+-            void        setListener(const sp<CameraListener>& listener);
+-            void        setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener);
+-            void        setPreviewCallbackFlags(int preview_callback_flag);
+-
+-            sp<ICameraRecordingProxy> getRecordingProxy();
+-
+-    // ICameraClient interface
+-    virtual void        notifyCallback(int32_t msgType, int32_t ext, int32_t ext2);
+-    virtual void        dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
+-                                     camera_frame_metadata_t *metadata);
+-    virtual void        dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
+-
+-    sp<ICamera>         remote();
+-
+-    class RecordingProxy : public BnCameraRecordingProxy
+-    {
+-    public:
+-        RecordingProxy(const sp<Camera>& camera);
+-
+-        // ICameraRecordingProxy interface
+-        virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
+-        virtual void stopRecording();
+-        virtual void releaseRecordingFrame(const sp<IMemory>& mem);
+-
+-    private:
+-        sp<Camera>         mCamera;
+-    };
+-
+-private:
+-                        Camera();
+-                        Camera(const Camera&);
+-                        Camera& operator=(const Camera);
+-                        virtual void binderDied(const wp<IBinder>& who);
+-
+-            class DeathNotifier: public IBinder::DeathRecipient
+-            {
+-            public:
+-                DeathNotifier() {
+-                }
+-
+-                virtual void binderDied(const wp<IBinder>& who);
+-            };
+-
+-            static sp<DeathNotifier> mDeathNotifier;
+-
+-            // helper function to obtain camera service handle
+-            static const sp<ICameraService>& getCameraService();
+-
+-            sp<ICamera>         mCamera;
+-            status_t            mStatus;
+-
+-            sp<CameraListener>  mListener;
+-            sp<ICameraRecordingProxyListener>  mRecordingProxyListener;
+-
+-            friend class DeathNotifier;
+-
+-            static  Mutex               mLock;
+-            static  sp<ICameraService>  mCameraService;
+-};
+-
+ }; // namespace android
+ 
+ #endif
+diff --git a/GonkCameraSource.cpp b/GonkCameraSource.cpp
+index af6b340..9dba596 100644
+--- a/GonkCameraSource.cpp
++++ b/GonkCameraSource.cpp
+@@ -14,29 +14,34 @@
+  * limitations under the License.
+  */
+ 
+-//#define LOG_NDEBUG 0
+-#define LOG_TAG "CameraSource"
+-#include <utils/Log.h>
++#include <base/basictypes.h>
++#include "nsDebug.h"
++#define DOM_CAMERA_LOG_LEVEL        3
++#include "CameraCommon.h"
++#define LOGD DOM_CAMERA_LOGA
++#define LOGV DOM_CAMERA_LOGI
++#define LOGI DOM_CAMERA_LOGI
++#define LOGW DOM_CAMERA_LOGW
++#define LOGE DOM_CAMERA_LOGE
+ 
+ #include <OMX_Component.h>
+-#include <binder/IPCThreadState.h>
+-#include <media/stagefright/CameraSource.h>
++#include "GonkCameraSource.h"
++#include "GonkCameraListener.h"
++#include "GonkCameraHwMgr.h"
+ #include <media/stagefright/MediaDebug.h>
+ #include <media/stagefright/MediaDefs.h>
+ #include <media/stagefright/MediaErrors.h>
+ #include <media/stagefright/MetaData.h>
+-#include <camera/Camera.h>
+-#include <camera/CameraParameters.h>
+-#include <surfaceflinger/Surface.h>
+ #include <utils/String8.h>
+ #include <cutils/properties.h>
+ 
++using namespace mozilla;
+ namespace android {
+ 
+ static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
+ 
+-struct CameraSourceListener : public CameraListener {
+-    CameraSourceListener(const sp<CameraSource> &source);
++struct GonkCameraSourceListener : public GonkCameraListener {
++    GonkCameraSourceListener(const sp<GonkCameraSource> &source);
+ 
+     virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
+     virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
+@@ -46,41 +51,41 @@ struct CameraSourceListener : public CameraListener {
+             nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
+ 
+ protected:
+-    virtual ~CameraSourceListener();
++    virtual ~GonkCameraSourceListener();
+ 
+ private:
+-    wp<CameraSource> mSource;
++    wp<GonkCameraSource> mSource;
+ 
+-    CameraSourceListener(const CameraSourceListener &);
+-    CameraSourceListener &operator=(const CameraSourceListener &);
++    GonkCameraSourceListener(const GonkCameraSourceListener &);
++    GonkCameraSourceListener &operator=(const GonkCameraSourceListener &);
+ };
+ 
+-CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
++GonkCameraSourceListener::GonkCameraSourceListener(const sp<GonkCameraSource> &source)
+     : mSource(source) {
+ }
+ 
+-CameraSourceListener::~CameraSourceListener() {
++GonkCameraSourceListener::~GonkCameraSourceListener() {
+ }
+ 
+-void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
++void GonkCameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
+     LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
+ }
+ 
+-void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
++void GonkCameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
+                                     camera_frame_metadata_t *metadata) {
+     LOGV("postData(%d, ptr:%p, size:%d)",
+          msgType, dataPtr->pointer(), dataPtr->size());
+ 
+-    sp<CameraSource> source = mSource.promote();
++    sp<GonkCameraSource> source = mSource.promote();
+     if (source.get() != NULL) {
+         source->dataCallback(msgType, dataPtr);
+     }
+ }
+ 
+-void CameraSourceListener::postDataTimestamp(
++void GonkCameraSourceListener::postDataTimestamp(
+         nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
+ 
+-    sp<CameraSource> source = mSource.promote();
++    sp<GonkCameraSource> source = mSource.promote();
+     if (source.get() != NULL) {
+         source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
+     }
+@@ -114,48 +119,30 @@ static int32_t getColorFormat(const char* colorFormat) {
+     }
+ 
+     LOGE("Uknown color format (%s), please add it to "
+-         "CameraSource::getColorFormat", colorFormat);
++         "GonkCameraSource::getColorFormat", colorFormat);
+ 
+     CHECK_EQ(0, "Unknown color format");
+ }
+ 
+-CameraSource *CameraSource::Create() {
+-    Size size;
+-    size.width = -1;
+-    size.height = -1;
+-
+-    sp<ICamera> camera;
+-    return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
+-}
+-
+-// static
+-CameraSource *CameraSource::CreateFromCamera(
+-    const sp<ICamera>& camera,
+-    const sp<ICameraRecordingProxy>& proxy,
+-    int32_t cameraId,
++GonkCameraSource *GonkCameraSource::Create(
++    int32_t cameraHandle,
+     Size videoSize,
+     int32_t frameRate,
+-    const sp<Surface>& surface,
+     bool storeMetaDataInVideoBuffers) {
+ 
+-    CameraSource *source = new CameraSource(camera, proxy, cameraId,
+-                    videoSize, frameRate, surface,
++    GonkCameraSource *source = new GonkCameraSource(cameraHandle,
++                    videoSize, frameRate,
+                     storeMetaDataInVideoBuffers);
+     return source;
+ }
+ 
+-CameraSource::CameraSource(
+-    const sp<ICamera>& camera,
+-    const sp<ICameraRecordingProxy>& proxy,
+-    int32_t cameraId,
++GonkCameraSource::GonkCameraSource(
++    int32_t cameraHandle,
+     Size videoSize,
+     int32_t frameRate,
+-    const sp<Surface>& surface,
+     bool storeMetaDataInVideoBuffers)
+     : mCameraFlags(0),
+       mVideoFrameRate(-1),
+-      mCamera(0),
+-      mSurface(surface),
+       mNumFramesReceived(0),
+       mLastFrameTimestampUs(0),
+       mStarted(false),
+@@ -169,43 +156,19 @@ CameraSource::CameraSource(
+     mVideoSize.width  = -1;
+     mVideoSize.height = -1;
+ 
+-    mInitCheck = init(camera, proxy, cameraId,
++    mCameraHandle = cameraHandle;
++
++    mInitCheck = init(
+                     videoSize, frameRate,
+                     storeMetaDataInVideoBuffers);
+     if (mInitCheck != OK) releaseCamera();
+ }
+ 
+-status_t CameraSource::initCheck() const {
++status_t GonkCameraSource::initCheck() const {
+     return mInitCheck;
+ }
+ 
+-status_t CameraSource::isCameraAvailable(
+-    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
+-    int32_t cameraId) {
+-
+-    if (camera == 0) {
+-        mCamera = Camera::connect(cameraId);
+-        if (mCamera == 0) return -EBUSY;
+-        mCameraFlags &= ~FLAGS_HOT_CAMERA;
+-    } else {
+-        // We get the proxy from Camera, not ICamera. We need to get the proxy
+-        // to the remote Camera owned by the application. Here mCamera is a
+-        // local Camera object created by us. We cannot use the proxy from
+-        // mCamera here.
+-        mCamera = Camera::create(camera);
+-        if (mCamera == 0) return -EBUSY;
+-        mCameraRecordingProxy = proxy;
+-        mCameraFlags |= FLAGS_HOT_CAMERA;
+-        mDeathNotifier = new DeathNotifier();
+-        // isBinderAlive needs linkToDeath to work.
+-        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
+-    }
+-
+-    mCamera->lock();
+-
+-    return OK;
+-}
+-
++//TODO: Do we need to reimplement isCameraAvailable?
+ 
+ /*
+  * Check to see whether the requested video width and height is one
+@@ -267,7 +230,7 @@ static void getSupportedVideoSizes(
+  * @param params CameraParameters to retrieve the information
+  * @return OK if no error.
+  */
+-status_t CameraSource::isCameraColorFormatSupported(
++status_t GonkCameraSource::isCameraColorFormatSupported(
+         const CameraParameters& params) {
+     mColorFormat = getColorFormat(params.get(
+             CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+@@ -292,7 +255,7 @@ status_t CameraSource::isCameraColorFormatSupported(
+  * @param frameRate the target frame rate in frames per second.
+  * @return OK if no error.
+  */
+-status_t CameraSource::configureCamera(
++status_t GonkCameraSource::configureCamera(
+         CameraParameters* params,
+         int32_t width, int32_t height,
+         int32_t frameRate) {
+@@ -347,10 +310,9 @@ status_t CameraSource::configureCamera(
+ 
+     if (isCameraParamChanged) {
+         // Either frame rate or frame size needs to be changed.
+-        String8 s = params->flatten();
+-        if (OK != mCamera->setParameters(s)) {
++        if (OK != GonkCameraHardware::PushParameters(mCameraHandle,*params)) {
+             LOGE("Could not change settings."
+-                 " Someone else is using camera %p?", mCamera.get());
++                 " Someone else is using camera ?");
+             return -EBUSY;
+         }
+     }
+@@ -368,7 +330,7 @@ status_t CameraSource::configureCamera(
+  * @param the target video frame height in pixels to check against
+  * @return OK if no error
+  */
+-status_t CameraSource::checkVideoSize(
++status_t GonkCameraSource::checkVideoSize(
+         const CameraParameters& params,
+         int32_t width, int32_t height) {
+ 
+@@ -420,7 +382,7 @@ status_t CameraSource::checkVideoSize(
+  * @param the target video frame rate to check against
+  * @return OK if no error.
+  */
+-status_t CameraSource::checkFrameRate(
++status_t GonkCameraSource::checkFrameRate(
+         const CameraParameters& params,
+         int32_t frameRate) {
+ 
+@@ -462,39 +424,17 @@ status_t CameraSource::checkFrameRate(
+  *
+  * @return OK if no error.
+  */
+-status_t CameraSource::init(
+-        const sp<ICamera>& camera,
+-        const sp<ICameraRecordingProxy>& proxy,
+-        int32_t cameraId,
++status_t GonkCameraSource::init(
+         Size videoSize,
+         int32_t frameRate,
+         bool storeMetaDataInVideoBuffers) {
+ 
+     LOGV("init");
+     status_t err = OK;
+-    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+-    err = initWithCameraAccess(camera, proxy, cameraId,
+-                               videoSize, frameRate,
+-                               storeMetaDataInVideoBuffers);
+-    IPCThreadState::self()->restoreCallingIdentity(token);
+-    return err;
+-}
+-
+-status_t CameraSource::initWithCameraAccess(
+-        const sp<ICamera>& camera,
+-        const sp<ICameraRecordingProxy>& proxy,
+-        int32_t cameraId,
+-        Size videoSize,
+-        int32_t frameRate,
+-        bool storeMetaDataInVideoBuffers) {
+-    LOGV("initWithCameraAccess");
+-    status_t err = OK;
++    //TODO: need to do something here to check the sanity of camera
+ 
+-    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
+-        LOGE("Camera connection could not be established.");
+-        return err;
+-    }
+-    CameraParameters params(mCamera->getParameters());
++    CameraParameters params;
++    GonkCameraHardware::PullParameters(mCameraHandle, params);
+     if ((err = isCameraColorFormatSupported(params)) != OK) {
+         return err;
+     }
+@@ -508,7 +448,8 @@ status_t CameraSource::initWithCameraAccess(
+     }
+ 
+     // Check on video frame size and frame rate.
+-    CameraParameters newCameraParams(mCamera->getParameters());
++    CameraParameters newCameraParams;
++    GonkCameraHardware::PullParameters(mCameraHandle, newCameraParams);
+     if ((err = checkVideoSize(newCameraParams,
+                 videoSize.width, videoSize.height)) != OK) {
+         return err;
+@@ -517,15 +458,11 @@ status_t CameraSource::initWithCameraAccess(
+         return err;
+     }
+ 
+-    // This CHECK is good, since we just passed the lock/unlock
+-    // check earlier by calling mCamera->setParameters().
+-    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
+-
+     // By default, do not store metadata in video buffers
+     mIsMetaDataStoredInVideoBuffers = false;
+-    mCamera->storeMetaDataInBuffers(false);
++    GonkCameraHardware::StoreMetaDataInBuffers(mCameraHandle, false);
+     if (storeMetaDataInVideoBuffers) {
+-        if (OK == mCamera->storeMetaDataInBuffers(true)) {
++        if (OK == GonkCameraHardware::StoreMetaDataInBuffers(mCameraHandle, true)) {
+             mIsMetaDataStoredInVideoBuffers = true;
+         }
+     }
+@@ -568,40 +505,28 @@ status_t CameraSource::initWithCameraAccess(
+     return OK;
+ }
+ 
+-CameraSource::~CameraSource() {
++GonkCameraSource::~GonkCameraSource() {
+     if (mStarted) {
+         stop();
+     } else if (mInitCheck == OK) {
+         // Camera is initialized but because start() is never called,
+         // the lock on Camera is never released(). This makes sure
+         // Camera's lock is released in this case.
++        // TODO: Don't think I need to do this
+         releaseCamera();
+     }
+ }
+ 
+-void CameraSource::startCameraRecording() {
++void GonkCameraSource::startCameraRecording() {
+     LOGV("startCameraRecording");
+-    // Reset the identity to the current thread because media server owns the
+-    // camera and recording is started by the applications. The applications
+-    // will connect to the camera in ICameraRecordingProxy::startRecording.
+-    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+-    if (mCameraFlags & FLAGS_HOT_CAMERA) {
+-        mCamera->unlock();
+-        mCamera.clear();
+-        CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this)));
+-    } else {
+-        mCamera->setListener(new CameraSourceListener(this));
+-        mCamera->startRecording();
+-        CHECK(mCamera->recordingEnabled());
+-    }
+-    IPCThreadState::self()->restoreCallingIdentity(token);
++    CHECK_EQ(OK, GonkCameraHardware::StartRecording(mCameraHandle));
+ }
+ 
+-status_t CameraSource::start(MetaData *meta) {
++status_t GonkCameraSource::start(MetaData *meta) {
+     LOGV("start");
+     CHECK(!mStarted);
+     if (mInitCheck != OK) {
+-        LOGE("CameraSource is not initialized yet");
++        LOGE("GonkCameraSource is not initialized yet");
+         return mInitCheck;
+     }
+ 
+@@ -614,58 +539,34 @@ status_t CameraSource::start(MetaData *meta) {
+     mStartTimeUs = 0;
+     int64_t startTimeUs;
+     if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
++        LOGV("Metadata enabled, startime: %lld us", startTimeUs);
+         mStartTimeUs = startTimeUs;
+     }
+ 
++    // Register a listener with GonkCameraHardware so that we can get callbacks
++    GonkCameraHardware::SetListener(mCameraHandle, new GonkCameraSourceListener(this));
++
+     startCameraRecording();
+ 
+     mStarted = true;
+     return OK;
+ }
+ 
+-void CameraSource::stopCameraRecording() {
++void GonkCameraSource::stopCameraRecording() {
+     LOGV("stopCameraRecording");
+-    if (mCameraFlags & FLAGS_HOT_CAMERA) {
+-        mCameraRecordingProxy->stopRecording();
+-    } else {
+-        mCamera->setListener(NULL);
+-        mCamera->stopRecording();
+-    }
++    GonkCameraHardware::StopRecording(mCameraHandle);
+ }
+ 
+-void CameraSource::releaseCamera() {
++void GonkCameraSource::releaseCamera() {
+     LOGV("releaseCamera");
+-    if (mCamera != 0) {
+-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+-        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+-            LOGV("Camera was cold when we started, stopping preview");
+-            mCamera->stopPreview();
+-            mCamera->disconnect();
+-        }
+-        mCamera->unlock();
+-        mCamera.clear();
+-        mCamera = 0;
+-        IPCThreadState::self()->restoreCallingIdentity(token);
+-    }
+-    if (mCameraRecordingProxy != 0) {
+-        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
+-        mCameraRecordingProxy.clear();
+-    }
+-    mCameraFlags = 0;
+ }
+ 
+-status_t CameraSource::stop() {
+-    LOGD("stop: E");
++status_t GonkCameraSource::stop() {
++    LOGV("stop: E");
+     Mutex::Autolock autoLock(mLock);
+     mStarted = false;
+     mFrameAvailableCondition.signal();
+ 
+-    int64_t token;
+-    bool isTokenValid = false;
+-    if (mCamera != 0) {
+-        token = IPCThreadState::self()->clearCallingIdentity();
+-        isTokenValid = true;
+-    }
+     releaseQueuedFrames();
+     while (!mFramesBeingEncoded.empty()) {
+         if (NO_ERROR !=
+@@ -675,11 +576,9 @@ status_t CameraSource::stop() {
+                 mFramesBeingEncoded.size());
+         }
+     }
++    LOGV("Calling stopCameraRecording");
+     stopCameraRecording();
+     releaseCamera();
+-    if (isTokenValid) {
+-        IPCThreadState::self()->restoreCallingIdentity(token);
+-    }
+ 
+     if (mCollectStats) {
+         LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
+@@ -692,22 +591,16 @@ status_t CameraSource::stop() {
+     }
+ 
+     CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
+-    LOGD("stop: X");
++    LOGV("stop: X");
+     return OK;
+ }
+ 
+-void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
++void GonkCameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+     LOGV("releaseRecordingFrame");
+-    if (mCameraRecordingProxy != NULL) {
+-        mCameraRecordingProxy->releaseRecordingFrame(frame);
+-    } else if (mCamera != NULL) {
+-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+-        mCamera->releaseRecordingFrame(frame);
+-        IPCThreadState::self()->restoreCallingIdentity(token);
+-    }
++    GonkCameraHardware::ReleaseRecordingFrame(mCameraHandle, frame);
+ }
+ 
+-void CameraSource::releaseQueuedFrames() {
++void GonkCameraSource::releaseQueuedFrames() {
+     List<sp<IMemory> >::iterator it;
+     while (!mFramesReceived.empty()) {
+         it = mFramesReceived.begin();
+@@ -717,15 +610,15 @@ void CameraSource::releaseQueuedFrames() {
+     }
+ }
+ 
+-sp<MetaData> CameraSource::getFormat() {
++sp<MetaData> GonkCameraSource::getFormat() {
+     return mMeta;
+ }
+ 
+-void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
++void GonkCameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
+     releaseRecordingFrame(frame);
+ }
+ 
+-void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
++void GonkCameraSource::signalBufferReturned(MediaBuffer *buffer) {
+     LOGV("signalBufferReturned: %p", buffer->data());
+     Mutex::Autolock autoLock(mLock);
+     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
+@@ -743,7 +636,7 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
+     CHECK_EQ(0, "signalBufferReturned: bogus buffer");
+ }
+ 
+-status_t CameraSource::read(
++status_t GonkCameraSource::read(
+         MediaBuffer **buffer, const ReadOptions *options) {
+     LOGV("read");
+ 
+@@ -764,11 +657,7 @@ status_t CameraSource::read(
+             if (NO_ERROR !=
+                 mFrameAvailableCondition.waitRelative(mLock,
+                     mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
+-                if (mCameraRecordingProxy != 0 &&
+-                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
+-                    LOGW("camera recording proxy is gone");
+-                    return ERROR_END_OF_STREAM;
+-                }
++                // TODO: check whether the camera is still in a sane state
+                 LOGW("Timed out waiting for incoming camera video frames: %lld us",
+                     mLastFrameTimestampUs);
+             }
+@@ -790,9 +679,10 @@ status_t CameraSource::read(
+     return OK;
+ }
+ 
+-void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
++void GonkCameraSource::dataCallbackTimestamp(int64_t timestampUs,
+         int32_t msgType, const sp<IMemory> &data) {
+     LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
++    //LOGV("dataCallbackTimestamp: data %x size %d", data->pointer(), data->size());
+     Mutex::Autolock autoLock(mLock);
+     if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
+         LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
+@@ -808,7 +698,7 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
+     }
+ 
+     // May need to skip frame or modify timestamp. Currently implemented
+-    // by the subclass CameraSourceTimeLapse.
++    // by the subclass GonkCameraSourceTimeLapse.
+     if (skipCurrentFrame(timestampUs)) {
+         releaseOneRecordingFrame(data);
+         return;
+@@ -839,22 +729,9 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
+     mFrameAvailableCondition.signal();
+ }
+ 
+-bool CameraSource::isMetaDataStoredInVideoBuffers() const {
++bool GonkCameraSource::isMetaDataStoredInVideoBuffers() const {
+     LOGV("isMetaDataStoredInVideoBuffers");
+     return mIsMetaDataStoredInVideoBuffers;
+ }
+ 
+-CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
+-    mSource = source;
+-}
+-
+-void CameraSource::ProxyListener::dataCallbackTimestamp(
+-        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
+-    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
+-}
+-
+-void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
+-    LOGI("Camera recording proxy died");
+-}
+-
+-}  // namespace android
++} // namespace android
+diff --git a/GonkCameraSource.h b/GonkCameraSource.h
+index 446720b..fe58f96 100644
+--- a/GonkCameraSource.h
++++ b/GonkCameraSource.h
+@@ -14,69 +14,31 @@
+  * limitations under the License.
+  */
+ 
+-#ifndef CAMERA_SOURCE_H_
++#ifndef GONK_CAMERA_SOURCE_H_
+ 
+-#define CAMERA_SOURCE_H_
++#define GONK_CAMERA_SOURCE_H_
+ 
+ #include <media/stagefright/MediaBuffer.h>
+ #include <media/stagefright/MediaSource.h>
+-#include <camera/ICamera.h>
+-#include <camera/ICameraRecordingProxyListener.h>
+ #include <camera/CameraParameters.h>
+ #include <utils/List.h>
+ #include <utils/RefBase.h>
++#include <utils/threads.h>
+ 
+ namespace android {
+ 
+ class IMemory;
+-class Camera;
+-class Surface;
++class GonkCameraSourceListener;
+ 
+-class CameraSource : public MediaSource, public MediaBufferObserver {
++class GonkCameraSource : public MediaSource, public MediaBufferObserver {
+ public:
+-    /**
+-     * Factory method to create a new CameraSource using the current
+-     * settings (such as video size, frame rate, color format, etc)
+-     * from the default camera.
+-     *
+-     * @return NULL on error.
+-     */
+-    static CameraSource *Create();
+ 
+-    /**
+-     * Factory method to create a new CameraSource.
+-     *
+-     * @param camera the video input frame data source. If it is NULL,
+-     *          we will try to connect to the camera with the given
+-     *          cameraId.
+-     *
+-     * @param cameraId the id of the camera that the source will connect
+-     *          to if camera is NULL; otherwise ignored.
+-     *
+-     * @param videoSize the dimension (in pixels) of the video frame
+-     * @param frameRate the target frames per second
+-     * @param surface the preview surface for display where preview
+-     *          frames are sent to
+-     * @param storeMetaDataInVideoBuffers true to request the camera
+-     *          source to store meta data in video buffers; false to
+-     *          request the camera source to store real YUV frame data
+-     *          in the video buffers. The camera source may not support
+-     *          storing meta data in video buffers, if so, a request
+-     *          to do that will NOT be honored. To find out whether
+-     *          meta data is actually being stored in video buffers
+-     *          during recording, call isMetaDataStoredInVideoBuffers().
+-     *
+-     * @return NULL on error.
+-     */
+-    static CameraSource *CreateFromCamera(const sp<ICamera> &camera,
+-                                          const sp<ICameraRecordingProxy> &proxy,
+-                                          int32_t cameraId,
+-                                          Size videoSize,
+-                                          int32_t frameRate,
+-                                          const sp<Surface>& surface,
+-                                          bool storeMetaDataInVideoBuffers = false);
++    static GonkCameraSource *Create(int32_t cameraHandle,
++                                    Size videoSize,
++                                    int32_t frameRate,
++                                    bool storeMetaDataInVideoBuffers = false);
+ 
+-    virtual ~CameraSource();
++    virtual ~GonkCameraSource();
+ 
+     virtual status_t start(MetaData *params = NULL);
+     virtual status_t stop();
+@@ -84,14 +46,14 @@ public:
+             MediaBuffer **buffer, const ReadOptions *options = NULL);
+ 
+     /**
+-     * Check whether a CameraSource object is properly initialized.
++     * Check whether a GonkCameraSource object is properly initialized.
+      * Must call this method before stop().
+      * @return OK if initialization has successfully completed.
+      */
+     virtual status_t initCheck() const;
+ 
+     /**
+-     * Returns the MetaData associated with the CameraSource,
++     * Returns the MetaData associated with the GonkCameraSource,
+      * including:
+      * kKeyColorFormat: YUV color format of the video frames
+      * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames
+@@ -113,22 +75,6 @@ public:
+     virtual void signalBufferReturned(MediaBuffer* buffer);
+ 
+ protected:
+-    class ProxyListener: public BnCameraRecordingProxyListener {
+-    public:
+-        ProxyListener(const sp<CameraSource>& source);
+-        virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+-                const sp<IMemory> &data);
+-
+-    private:
+-        sp<CameraSource> mSource;
+-    };
+-
+-    // isBinderAlive needs linkToDeath to work.
+-    class DeathNotifier: public IBinder::DeathRecipient {
+-    public:
+-        DeathNotifier() {}
+-        virtual void binderDied(const wp<IBinder>& who);
+-    };
+ 
+     enum CameraFlags {
+         FLAGS_SET_CAMERA = 1L << 0,
+@@ -141,10 +87,6 @@ protected:
+     int32_t  mColorFormat;
+     status_t mInitCheck;
+ 
+-    sp<Camera>   mCamera;
+-    sp<ICameraRecordingProxy>   mCameraRecordingProxy;
+-    sp<DeathNotifier> mDeathNotifier;
+-    sp<Surface>  mSurface;
+     sp<MetaData> mMeta;
+ 
+     int64_t mStartTimeUs;
+@@ -156,11 +98,9 @@ protected:
+     // Time between capture of two frames.
+     int64_t mTimeBetweenFrameCaptureUs;
+ 
+-    CameraSource(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
+-                 int32_t cameraId,
++    GonkCameraSource(int32_t cameraHandle,
+                  Size videoSize, int32_t frameRate,
+-                 const sp<Surface>& surface,
+-                 bool storeMetaDataInVideoBuffers);
++                 bool storeMetaDataInVideoBuffers = false);
+ 
+     virtual void startCameraRecording();
+     virtual void stopCameraRecording();
+@@ -170,6 +110,7 @@ protected:
+     // Called from dataCallbackTimestamp.
+     virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}
+ 
++    friend class GonkCameraSourceListener;
+     // Callback called when still camera raw data is available.
+     virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}
+ 
+@@ -177,7 +118,6 @@ protected:
+             const sp<IMemory> &data);
+ 
+ private:
+-    friend class CameraSourceListener;
+ 
+     Mutex mLock;
+     Condition mFrameAvailableCondition;
+@@ -192,23 +132,13 @@ private:
+     int64_t mGlitchDurationThresholdUs;
+     bool mCollectStats;
+     bool mIsMetaDataStoredInVideoBuffers;
++    int32_t mCameraHandle;
+ 
+     void releaseQueuedFrames();
+     void releaseOneRecordingFrame(const sp<IMemory>& frame);
+ 
+-
+-    status_t init(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
+-                  int32_t cameraId, Size videoSize, int32_t frameRate,
+-                  bool storeMetaDataInVideoBuffers);
+-
+-    status_t initWithCameraAccess(
+-                  const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
+-                  int32_t cameraId, Size videoSize, int32_t frameRate,
++    status_t init(Size videoSize, int32_t frameRate,
+                   bool storeMetaDataInVideoBuffers);
+-
+-    status_t isCameraAvailable(const sp<ICamera>& camera,
+-                               const sp<ICameraRecordingProxy>& proxy,
+-                               int32_t cameraId);
+     status_t isCameraColorFormatSupported(const CameraParameters& params);
+     status_t configureCamera(CameraParameters* params,
+                     int32_t width, int32_t height,
+@@ -222,10 +152,10 @@ private:
+ 
+     void releaseCamera();
+ 
+-    CameraSource(const CameraSource &);
+-    CameraSource &operator=(const CameraSource &);
++    GonkCameraSource(const GonkCameraSource &);
++    GonkCameraSource &operator=(const GonkCameraSource &);
+ };
+ 
+ }  // namespace android
+ 
+-#endif  // CAMERA_SOURCE_H_
++#endif  // GONK_CAMERA_SOURCE_H_
+diff --git a/GonkRecorder.cpp b/GonkRecorder.cpp
+index b20ca9d..2dc625c 100644
+--- a/GonkRecorder.cpp
++++ b/GonkRecorder.cpp
+@@ -16,35 +16,23 @@
+  */
+ 
+ //#define LOG_NDEBUG 0
+-#define LOG_TAG "StagefrightRecorder"
++#define LOG_TAG "GonkRecorder"
++
+ #include <utils/Log.h>
+ #include <media/AudioParameter.h>
+-#include "StagefrightRecorder.h"
+-
+-#include <binder/IPCThreadState.h>
+-#include <binder/IServiceManager.h>
++#include "GonkRecorder.h"
+ 
+-#include <media/IMediaPlayerService.h>
+ #include <media/stagefright/AudioSource.h>
+ #include <media/stagefright/AMRWriter.h>
+-#include <media/stagefright/AACWriter.h>
+-#include <media/stagefright/ExtendedWriter.h>
+-#include <media/stagefright/FMA2DPWriter.h>
+-#include <media/stagefright/CameraSource.h>
+-#include <media/stagefright/CameraSourceTimeLapse.h>
+ #include <media/stagefright/ExtendedWriter.h>
+ #include <media/stagefright/MPEG2TSWriter.h>
+ #include <media/stagefright/MPEG4Writer.h>
+ #include <media/stagefright/MediaDebug.h>
+ #include <media/stagefright/MediaDefs.h>
+ #include <media/stagefright/MetaData.h>
+-#include <media/stagefright/OMXClient.h>
++#include <OMX.h>
+ #include <media/stagefright/OMXCodec.h>
+-#include <media/stagefright/SurfaceMediaSource.h>
+ #include <media/MediaProfiles.h>
+-#include <camera/ICamera.h>
+-#include <camera/CameraParameters.h>
+-#include <surfaceflinger/Surface.h>
+ #include <utils/String8.h>
+ 
+ #include <utils/Errors.h>
+@@ -57,51 +45,41 @@
+ #include "ARTPWriter.h"
+ 
+ #include <cutils/properties.h>
++#include "GonkCameraSource.h"
+ 
+ namespace android {
+ 
+-// To collect the encoder usage for the battery app
+-static void addBatteryData(uint32_t params) {
+-    sp<IBinder> binder =
+-        defaultServiceManager()->getService(String16("media.player"));
+-    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+-    CHECK(service.get() != NULL);
+-
+-    service->addBatteryData(params);
++static sp<IOMX> sOMX = NULL;
++static sp<IOMX> GetOMX() {
++    if (sOMX.get() == NULL) {
++        sOMX = new OMX;
++    }
++    return sOMX;
+ }
+ 
+-
+-StagefrightRecorder::StagefrightRecorder()
++GonkRecorder::GonkRecorder()
+     : mWriter(NULL),
+       mOutputFd(-1),
+       mAudioSource(AUDIO_SOURCE_CNT),
+       mVideoSource(VIDEO_SOURCE_LIST_END),
+-      mStarted(false), mSurfaceMediaSource(NULL),
++      mStarted(false),
+       mDisableAudio(false) {
+ 
+     LOGV("Constructor");
+     reset();
+ }
+ 
+-StagefrightRecorder::~StagefrightRecorder() {
++GonkRecorder::~GonkRecorder() {
+     LOGV("Destructor");
+     stop();
+ }
+ 
+-status_t StagefrightRecorder::init() {
++status_t GonkRecorder::init() {
+     LOGV("init");
+     return OK;
+ }
+ 
+-// The client side of mediaserver asks it to creat a SurfaceMediaSource
+-// and return a interface reference. The client side will use that
+-// while encoding GL Frames
+-sp<ISurfaceTexture> StagefrightRecorder::querySurfaceMediaSource() const {
+-    LOGV("Get SurfaceMediaSource");
+-    return mSurfaceMediaSource;
+-}
+-
+-status_t StagefrightRecorder::setAudioSource(audio_source_t as) {
++status_t GonkRecorder::setAudioSource(audio_source_t as) {
+     LOGV("setAudioSource: %d", as);
+     if (as < AUDIO_SOURCE_DEFAULT ||
+         as >= AUDIO_SOURCE_CNT) {
+@@ -122,7 +100,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setVideoSource(video_source vs) {
++status_t GonkRecorder::setVideoSource(video_source vs) {
+     LOGV("setVideoSource: %d", vs);
+     if (vs < VIDEO_SOURCE_DEFAULT ||
+         vs >= VIDEO_SOURCE_LIST_END) {
+@@ -139,7 +117,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setOutputFormat(output_format of) {
++status_t GonkRecorder::setOutputFormat(output_format of) {
+     LOGV("setOutputFormat: %d", of);
+     if (of < OUTPUT_FORMAT_DEFAULT ||
+         of >= OUTPUT_FORMAT_LIST_END) {
+@@ -156,7 +134,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setAudioEncoder(audio_encoder ae) {
++status_t GonkRecorder::setAudioEncoder(audio_encoder ae) {
+     LOGV("setAudioEncoder: %d", ae);
+     if (ae < AUDIO_ENCODER_DEFAULT ||
+         ae >= AUDIO_ENCODER_LIST_END) {
+@@ -174,21 +152,10 @@
+         mAudioEncoder = ae;
+     }
+ 
+-    // Use default values if appropriate setparam's weren't called.
+-    if(mAudioEncoder == AUDIO_ENCODER_AAC) {
+-        mSampleRate = mSampleRate ? mSampleRate : 48000;
+-        mAudioChannels = mAudioChannels ? mAudioChannels : 2;
+-        mAudioBitRate = mAudioBitRate ? mAudioBitRate : 156000;
+-    }
+-    else{
+-        mSampleRate = mSampleRate ? mSampleRate : 8000;
+-        mAudioChannels = mAudioChannels ? mAudioChannels : 1;
+-        mAudioBitRate = mAudioBitRate ? mAudioBitRate : 12200;
+-    }
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setVideoEncoder(video_encoder ve) {
++status_t GonkRecorder::setVideoEncoder(video_encoder ve) {
+     LOGV("setVideoEncoder: %d", ve);
+     if (ve < VIDEO_ENCODER_DEFAULT ||
+         ve >= VIDEO_ENCODER_LIST_END) {
+@@ -205,7 +172,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setVideoSize(int width, int height) {
++status_t GonkRecorder::setVideoSize(int width, int height) {
+     LOGV("setVideoSize: %dx%d", width, height);
+     if (width <= 0 || height <= 0) {
+         LOGE("Invalid video size: %dx%d", width, height);
+@@ -219,7 +186,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {
++status_t GonkRecorder::setVideoFrameRate(int frames_per_second) {
+     LOGV("setVideoFrameRate: %d", frames_per_second);
+     if ((frames_per_second <= 0 && frames_per_second != -1) ||
+         frames_per_second > 120) {
+@@ -233,31 +200,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera,
+-                                        const sp<ICameraRecordingProxy> &proxy) {
+-    LOGV("setCamera");
+-    if (camera == 0) {
+-        LOGE("camera is NULL");
+-        return BAD_VALUE;
+-    }
+-    if (proxy == 0) {
+-        LOGE("camera proxy is NULL");
+-        return BAD_VALUE;
+-    }
+-
+-    mCamera = camera;
+-    mCameraProxy = proxy;
+-    return OK;
+-}
+-
+-status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) {
+-    LOGV("setPreviewSurface: %p", surface.get());
+-    mPreviewSurface = surface;
+-
+-    return OK;
+-}
+-
+-status_t StagefrightRecorder::setOutputFile(const char *path) {
++status_t GonkRecorder::setOutputFile(const char *path) {
+     LOGE("setOutputFile(const char*) must not be called");
+     // We don't actually support this at all, as the media_server process
+     // no longer has permissions to create files.
+@@ -265,7 +208,7 @@
+     return -EPERM;
+ }
+ 
+-status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t length) {
++status_t GonkRecorder::setOutputFile(int fd, int64_t offset, int64_t length) {
+     LOGV("setOutputFile: %d, %lld, %lld", fd, offset, length);
+     // These don't make any sense, do they?
+     CHECK_EQ(offset, 0);
+@@ -339,7 +282,7 @@
+     s->setTo(String8(&data[leading_space], i - leading_space));
+ }
+ 
+-status_t StagefrightRecorder::setParamAudioSamplingRate(int32_t sampleRate) {
++status_t GonkRecorder::setParamAudioSamplingRate(int32_t sampleRate) {
+     LOGV("setParamAudioSamplingRate: %d", sampleRate);
+     if (sampleRate <= 0) {
+         LOGE("Invalid audio sampling rate: %d", sampleRate);
+@@ -351,7 +294,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamAudioNumberOfChannels(int32_t channels) {
++status_t GonkRecorder::setParamAudioNumberOfChannels(int32_t channels) {
+     LOGV("setParamAudioNumberOfChannels: %d", channels);
+     if (channels <= 0 || channels >= 3) {
+         LOGE("Invalid number of audio channels: %d", channels);
+@@ -363,7 +306,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamAudioEncodingBitRate(int32_t bitRate) {
++status_t GonkRecorder::setParamAudioEncodingBitRate(int32_t bitRate) {
+     LOGV("setParamAudioEncodingBitRate: %d", bitRate);
+     if (bitRate <= 0) {
+         LOGE("Invalid audio encoding bit rate: %d", bitRate);
+@@ -378,7 +321,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamVideoEncodingBitRate(int32_t bitRate) {
++status_t GonkRecorder::setParamVideoEncodingBitRate(int32_t bitRate) {
+     LOGV("setParamVideoEncodingBitRate: %d", bitRate);
+     if (bitRate <= 0) {
+         LOGE("Invalid video encoding bit rate: %d", bitRate);
+@@ -394,7 +337,7 @@
+ }
+ 
+ // Always rotate clockwise, and only support 0, 90, 180 and 270 for now.
+-status_t StagefrightRecorder::setParamVideoRotation(int32_t degrees) {
++status_t GonkRecorder::setParamVideoRotation(int32_t degrees) {
+     LOGV("setParamVideoRotation: %d", degrees);
+     if (degrees < 0 || degrees % 90 != 0) {
+         LOGE("Unsupported video rotation angle: %d", degrees);
+@@ -404,7 +347,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
++status_t GonkRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
+     LOGV("setParamMaxFileDurationUs: %lld us", timeUs);
+ 
+     // This is meant for backward compatibility for MediaRecorder.java
+@@ -423,7 +366,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
++status_t GonkRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
+     LOGV("setParamMaxFileSizeBytes: %lld bytes", bytes);
+ 
+     // This is meant for backward compatibility for MediaRecorder.java
+@@ -449,7 +392,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamInterleaveDuration(int32_t durationUs) {
++status_t GonkRecorder::setParamInterleaveDuration(int32_t durationUs) {
+     LOGV("setParamInterleaveDuration: %d", durationUs);
+     if (durationUs <= 500000) {           //  500 ms
+         // If interleave duration is too small, it is very inefficient to do
+@@ -471,20 +414,20 @@
+ // If seconds <  0, only the first frame is I frame, and rest are all P frames
+ // If seconds == 0, all frames are encoded as I frames. No P frames
+ // If seconds >  0, it is the time spacing (seconds) between 2 neighboring I frames
+-status_t StagefrightRecorder::setParamVideoIFramesInterval(int32_t seconds) {
++status_t GonkRecorder::setParamVideoIFramesInterval(int32_t seconds) {
+     LOGV("setParamVideoIFramesInterval: %d seconds", seconds);
+     mIFramesIntervalSec = seconds;
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParam64BitFileOffset(bool use64Bit) {
++status_t GonkRecorder::setParam64BitFileOffset(bool use64Bit) {
+     LOGV("setParam64BitFileOffset: %s",
+         use64Bit? "use 64 bit file offset": "use 32 bit file offset");
+     mUse64BitFileOffset = use64Bit;
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamVideoCameraId(int32_t cameraId) {
++status_t GonkRecorder::setParamVideoCameraId(int32_t cameraId) {
+     LOGV("setParamVideoCameraId: %d", cameraId);
+     if (cameraId < 0) {
+         return BAD_VALUE;
+@@ -493,7 +436,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamTrackTimeStatus(int64_t timeDurationUs) {
++status_t GonkRecorder::setParamTrackTimeStatus(int64_t timeDurationUs) {
+     LOGV("setParamTrackTimeStatus: %lld", timeDurationUs);
+     if (timeDurationUs < 20000) {  // Infeasible if shorter than 20 ms?
+         LOGE("Tracking time duration too short: %lld us", timeDurationUs);
+@@ -503,7 +446,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamVideoEncoderProfile(int32_t profile) {
++status_t GonkRecorder::setParamVideoEncoderProfile(int32_t profile) {
+     LOGV("setParamVideoEncoderProfile: %d", profile);
+ 
+     // Additional check will be done later when we load the encoder.
+@@ -512,7 +455,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamVideoEncoderLevel(int32_t level) {
++status_t GonkRecorder::setParamVideoEncoderLevel(int32_t level) {
+     LOGV("setParamVideoEncoderLevel: %d", level);
+ 
+     // Additional check will be done later when we load the encoder.
+@@ -521,7 +464,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamMovieTimeScale(int32_t timeScale) {
++status_t GonkRecorder::setParamMovieTimeScale(int32_t timeScale) {
+     LOGV("setParamMovieTimeScale: %d", timeScale);
+ 
+     // The range is set to be the same as the audio's time scale range
+@@ -534,7 +477,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamVideoTimeScale(int32_t timeScale) {
++status_t GonkRecorder::setParamVideoTimeScale(int32_t timeScale) {
+     LOGV("setParamVideoTimeScale: %d", timeScale);
+ 
+     // 60000 is chosen to make sure that each video frame from a 60-fps
+@@ -547,7 +490,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamAudioTimeScale(int32_t timeScale) {
++status_t GonkRecorder::setParamAudioTimeScale(int32_t timeScale) {
+     LOGV("setParamAudioTimeScale: %d", timeScale);
+ 
+     // 96000 Hz is the highest sampling rate support in AAC.
+@@ -559,33 +502,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) {
+-    LOGV("setParamTimeLapseEnable: %d", timeLapseEnable);
+-
+-    if(timeLapseEnable == 0) {
+-        mCaptureTimeLapse = false;
+-    } else if (timeLapseEnable == 1) {
+-        mCaptureTimeLapse = true;
+-    } else {
+-        return BAD_VALUE;
+-    }
+-    return OK;
+-}
+-
+-status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) {
+-    LOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs);
+-
+-    // Not allowing time more than a day
+-    if (timeUs <= 0 || timeUs > 86400*1E6) {
+-        LOGE("Time between time lapse frame capture (%lld) is out of range [0, 1 Day]", timeUs);
+-        return BAD_VALUE;
+-    }
+-
+-    mTimeBetweenTimeLapseFrameCaptureUs = timeUs;
+-    return OK;
+-}
+-
+-status_t StagefrightRecorder::setParamGeoDataLongitude(
++status_t GonkRecorder::setParamGeoDataLongitude(
+     int64_t longitudex10000) {
+ 
+     if (longitudex10000 > 1800000 || longitudex10000 < -1800000) {
+@@ -595,7 +512,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParamGeoDataLatitude(
++status_t GonkRecorder::setParamGeoDataLatitude(
+     int64_t latitudex10000) {
+ 
+     if (latitudex10000 > 900000 || latitudex10000 < -900000) {
+@@ -605,7 +522,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setParameter(
++status_t GonkRecorder::setParameter(
+         const String8 &key, const String8 &value) {
+     LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
+     if (key == "max-duration") {
+@@ -703,24 +620,13 @@
+         if (safe_strtoi32(value.string(), &timeScale)) {
+             return setParamVideoTimeScale(timeScale);
+         }
+-    } else if (key == "time-lapse-enable") {
+-        int32_t timeLapseEnable;
+-        if (safe_strtoi32(value.string(), &timeLapseEnable)) {
+-            return setParamTimeLapseEnable(timeLapseEnable);
+-        }
+-    } else if (key == "time-between-time-lapse-frame-capture") {
+-        int64_t timeBetweenTimeLapseFrameCaptureMs;
+-        if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
+-            return setParamTimeBetweenTimeLapseFrameCapture(
+-                    1000LL * timeBetweenTimeLapseFrameCaptureMs);
+-        }
+     } else {
+         LOGE("setParameter: failed to find key %s", key.string());
+     }
+     return BAD_VALUE;
+ }
+ 
+-status_t StagefrightRecorder::setParameters(const String8 &params) {
++status_t GonkRecorder::setParameters(const String8 &params) {
+     LOGV("setParameters: %s", params.string());
+     const char *cparams = params.string();
+     const char *key_start = cparams;
+@@ -755,13 +661,13 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setListener(const sp<IMediaRecorderClient> &listener) {
++status_t GonkRecorder::setListener(const sp<IMediaRecorderClient> &listener) {
+     mListener = listener;
+ 
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::prepare() {
++status_t GonkRecorder::prepare() {
+   LOGV(" %s E", __func__ );
+ 
+   if(mVideoSource != VIDEO_SOURCE_LIST_END && mVideoEncoder != VIDEO_ENCODER_LIST_END && mVideoHeight && mVideoWidth &&             /*Video recording*/
+@@ -776,17 +682,15 @@
+   return OK;
+ }
+ 
+-status_t StagefrightRecorder::start() {
++status_t GonkRecorder::start() {
+     CHECK(mOutputFd >= 0);
+ 
+     if (mWriter != NULL) {
+-        LOGE("File writer is not avaialble");
++        LOGE("File writer is not available");
+         return UNKNOWN_ERROR;
+     }
+ 
+     status_t status = OK;
+-    if(AUDIO_SOURCE_FM_RX_A2DP == mAudioSource)
+-        return startFMA2DPWriter();
+ 
+     switch (mOutputFormat) {
+         case OUTPUT_FORMAT_DEFAULT:
+@@ -800,22 +704,9 @@
+             status = startAMRRecording();
+             break;
+ 
+-        case OUTPUT_FORMAT_AAC_ADIF:
+-        case OUTPUT_FORMAT_AAC_ADTS:
+-            status = startAACRecording();
+-            break;
+-
+-        case OUTPUT_FORMAT_RTP_AVP:
+-            status = startRTPRecording();
+-            break;
+-
+         case OUTPUT_FORMAT_MPEG2TS:
+             status = startMPEG2TSRecording();
+ 		    break;
+-			
+-        case OUTPUT_FORMAT_QCP:
+-            status = startExtendedRecording( );
+-		    break;
+         default:
+             LOGE("Unsupported output file format: %d", mOutputFormat);
+             status = UNKNOWN_ERROR;
+@@ -824,22 +715,12 @@
+ 
+     if ((status == OK) && (!mStarted)) {
+         mStarted = true;
+-
+-        uint32_t params = IMediaPlayerService::kBatteryDataCodecStarted;
+-        if (mAudioSource != AUDIO_SOURCE_CNT) {
+-            params |= IMediaPlayerService::kBatteryDataTrackAudio;
+-        }
+-        if (mVideoSource != VIDEO_SOURCE_LIST_END) {
+-            params |= IMediaPlayerService::kBatteryDataTrackVideo;
+-        }
+-
+-        addBatteryData(params);
+     }
+ 
+     return status;
+ }
+ 
+-sp<MediaSource> StagefrightRecorder::createAudioSource() {
++sp<MediaSource> GonkRecorder::createAudioSource() {
+ 
+     bool tunneledSource = false;
+     const char *tunnelMime;
+@@ -907,12 +788,6 @@
+         case AUDIO_ENCODER_AAC:
+             mime = MEDIA_MIMETYPE_AUDIO_AAC;
+             break;
+-        case AUDIO_ENCODER_EVRC:
+-            mime = MEDIA_MIMETYPE_AUDIO_EVRC;
+-            break;
+-        case AUDIO_ENCODER_QCELP:
+-            mime = MEDIA_MIMETYPE_AUDIO_QCELP;
+-            break;
+         default:
+             LOGE("Unknown audio encoder: %d", mAudioEncoder);
+             return NULL;
+@@ -931,36 +806,17 @@
+         encMeta->setInt32(kKeyTimeScale, mAudioTimeScale);
+     }
+ 
+-    OMXClient client;
+-    CHECK_EQ(client.connect(), OK);
+-
++    // use direct OMX interface instead of connecting to
++    // mediaserver over binder calls
+     sp<MediaSource> audioEncoder =
+-        OMXCodec::Create(client.interface(), encMeta,
++        OMXCodec::Create(GetOMX(), encMeta,
+                          true /* createEncoder */, audioSource);
+     mAudioSourceNode = audioSource;
+ 
+     return audioEncoder;
+ }
+ 
+-status_t StagefrightRecorder::startAACRecording() {
+-    // FIXME:
+-    // Add support for OUTPUT_FORMAT_AAC_ADIF
+-    CHECK(mOutputFormat == OUTPUT_FORMAT_AAC_ADTS);
+-
+-    CHECK(mAudioEncoder == AUDIO_ENCODER_AAC);
+-    CHECK(mAudioSource != AUDIO_SOURCE_CNT);
+-
+-    mWriter = new AACWriter(mOutputFd);
+-    status_t status = startRawAudioRecording();
+-    if (status != OK) {
+-        mWriter.clear();
+-        mWriter = NULL;
+-    }
+-
+-    return status;
+-}
+-
+-status_t StagefrightRecorder::startAMRRecording() {
++status_t GonkRecorder::startAMRRecording() {
+     CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
+           mOutputFormat == OUTPUT_FORMAT_AMR_WB);
+ 
+@@ -971,28 +827,12 @@
+                     mAudioEncoder);
+             return BAD_VALUE;
+         }
+-        if (mSampleRate != 8000) {
+-            LOGE("Invalid sampling rate %d used for AMRNB recording",
+-                    mSampleRate);
+-            return BAD_VALUE;
+-        }
+     } else {  // mOutputFormat must be OUTPUT_FORMAT_AMR_WB
+         if (mAudioEncoder != AUDIO_ENCODER_AMR_WB) {
+             LOGE("Invlaid encoder %d used for AMRWB recording",
+                     mAudioEncoder);
+             return BAD_VALUE;
+         }
+-        if (mSampleRate != 16000) {
+-            LOGE("Invalid sample rate %d used for AMRWB recording",
+-                    mSampleRate);
+-            return BAD_VALUE;
+-        }
+-    }
+-
+-    if (mAudioChannels != 1) {
+-        LOGE("Invalid number of audio channels %d used for amr recording",
+-                mAudioChannels);
+-        return BAD_VALUE;
+     }
+ 
+     mWriter = new AMRWriter(mOutputFd);
+@@ -1004,7 +844,7 @@
+     return status;
+ }
+ 
+-status_t StagefrightRecorder::startRawAudioRecording() {
++status_t GonkRecorder::startRawAudioRecording() {
+     if (mAudioSource >= AUDIO_SOURCE_CNT) {
+         LOGE("Invalid audio source: %d", mAudioSource);
+         return BAD_VALUE;
+@@ -1035,62 +875,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::startFMA2DPWriter() {
+-    /* FM soc outputs at 48k */
+-	mSampleRate = 48000;
+-	mAudioChannels = 2;
+-	
+-    sp<MetaData> meta = new MetaData;
+-    meta->setInt32(kKeyChannelCount, mAudioChannels);
+-    meta->setInt32(kKeySampleRate, mSampleRate);
+-
+-    mWriter = new FMA2DPWriter();
+-    mWriter->setListener(mListener);
+-    mWriter->start(meta.get());
+-    return OK;
+-}
+-
+-status_t StagefrightRecorder::startRTPRecording() {
+-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
+-
+-    if ((mAudioSource != AUDIO_SOURCE_CNT
+-                && mVideoSource != VIDEO_SOURCE_LIST_END)
+-            || (mAudioSource == AUDIO_SOURCE_CNT
+-                && mVideoSource == VIDEO_SOURCE_LIST_END)) {
+-        // Must have exactly one source.
+-        return BAD_VALUE;
+-    }
+-
+-    if (mOutputFd < 0) {
+-        return BAD_VALUE;
+-    }
+-
+-    sp<MediaSource> source;
+-
+-    if (mAudioSource != AUDIO_SOURCE_CNT) {
+-        source = createAudioSource();
+-    } else {
+-
+-        sp<MediaSource> mediaSource;
+-        status_t err = setupMediaSource(&mediaSource);
+-        if (err != OK) {
+-            return err;
+-        }
+-
+-        err = setupVideoEncoder(mediaSource, mVideoBitRate, &source);
+-        if (err != OK) {
+-            return err;
+-        }
+-    }
+-
+-    mWriter = new ARTPWriter(mOutputFd);
+-    mWriter->addSource(source);
+-    mWriter->setListener(mListener);
+-
+-    return mWriter->start();
+-}
+-
+-status_t StagefrightRecorder::startMPEG2TSRecording() {
++status_t GonkRecorder::startMPEG2TSRecording() {
+     CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
+ 
+     sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
+@@ -1141,7 +926,7 @@
+     return mWriter->start();
+ }
+ 
+-void StagefrightRecorder::clipVideoFrameRate() {
++void GonkRecorder::clipVideoFrameRate() {
+     LOGV("clipVideoFrameRate: encoder %d", mVideoEncoder);
+     int minFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
+                         "enc.vid.fps.min", mVideoEncoder);
+@@ -1158,7 +943,7 @@
+     }
+ }
+ 
+-void StagefrightRecorder::clipVideoBitRate() {
++void GonkRecorder::clipVideoBitRate() {
+     LOGV("clipVideoBitRate: encoder %d", mVideoEncoder);
+     int minBitRate = mEncoderProfiles->getVideoEncoderParamByName(
+                         "enc.vid.bps.min", mVideoEncoder);
+@@ -1175,7 +960,7 @@
+     }
+ }
+ 
+-void StagefrightRecorder::clipVideoFrameWidth() {
++void GonkRecorder::clipVideoFrameWidth() {
+     LOGV("clipVideoFrameWidth: encoder %d", mVideoEncoder);
+     int minFrameWidth = mEncoderProfiles->getVideoEncoderParamByName(
+                         "enc.vid.width.min", mVideoEncoder);
+@@ -1192,8 +977,7 @@
+     }
+ }
+ 
+-status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
+-    if (!mCaptureTimeLapse) {
++status_t GonkRecorder::checkVideoEncoderCapabilities() {
+         // Dont clip for time lapse capture as encoder will have enough
+         // time to encode because of slow capture rate of time lapse.
+         clipVideoBitRate();
+@@ -1201,13 +985,12 @@
+         clipVideoFrameWidth();
+         clipVideoFrameHeight();
+         setDefaultProfileIfNecessary();
+-    }
+     return OK;
+ }
+ 
+ // Set to use AVC baseline profile if the encoding parameters matches
+ // CAMCORDER_QUALITY_LOW profile; this is for the sake of MMS service.
+-void StagefrightRecorder::setDefaultProfileIfNecessary() {
++void GonkRecorder::setDefaultProfileIfNecessary() {
+     LOGV("setDefaultProfileIfNecessary");
+ 
+     camcorder_quality quality = CAMCORDER_QUALITY_LOW;
+@@ -1263,14 +1046,14 @@
+     }
+ }
+ 
+-status_t StagefrightRecorder::checkAudioEncoderCapabilities() {
++status_t GonkRecorder::checkAudioEncoderCapabilities() {
+     clipAudioBitRate();
+     clipAudioSampleRate();
+     clipNumberOfAudioChannels();
+     return OK;
+ }
+ 
+-void StagefrightRecorder::clipAudioBitRate() {
++void GonkRecorder::clipAudioBitRate() {
+     LOGV("clipAudioBitRate: encoder %d", mAudioEncoder);
+ 
+     int minAudioBitRate =
+@@ -1292,7 +1075,7 @@
+     }
+ }
+ 
+-void StagefrightRecorder::clipAudioSampleRate() {
++void GonkRecorder::clipAudioSampleRate() {
+     LOGV("clipAudioSampleRate: encoder %d", mAudioEncoder);
+ 
+     int minSampleRate =
+@@ -1314,7 +1097,7 @@
+     }
+ }
+ 
+-void StagefrightRecorder::clipNumberOfAudioChannels() {
++void GonkRecorder::clipNumberOfAudioChannels() {
+     LOGV("clipNumberOfAudioChannels: encoder %d", mAudioEncoder);
+ 
+     int minChannels =
+@@ -1336,7 +1119,7 @@
+     }
+ }
+ 
+-void StagefrightRecorder::clipVideoFrameHeight() {
++void GonkRecorder::clipVideoFrameHeight() {
+     LOGV("clipVideoFrameHeight: encoder %d", mVideoEncoder);
+     int minFrameHeight = mEncoderProfiles->getVideoEncoderParamByName(
+                         "enc.vid.height.min", mVideoEncoder);
+@@ -1354,61 +1137,26 @@
+ }
+ 
+ // Set up the appropriate MediaSource depending on the chosen option
+-status_t StagefrightRecorder::setupMediaSource(
++status_t GonkRecorder::setupMediaSource(
+                       sp<MediaSource> *mediaSource) {
+     if (mVideoSource == VIDEO_SOURCE_DEFAULT
+             || mVideoSource == VIDEO_SOURCE_CAMERA) {
+-        sp<CameraSource> cameraSource;
++        sp<GonkCameraSource> cameraSource;
+         status_t err = setupCameraSource(&cameraSource);
+         if (err != OK) {
+             return err;
+         }
+         *mediaSource = cameraSource;
+     } else if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
+-        // If using GRAlloc buffers, setup surfacemediasource.
+-        // Later a handle to that will be passed
+-        // to the client side when queried
+-        status_t err = setupSurfaceMediaSource();
+-        if (err != OK) {
+-            return err;
+-        }
+-        *mediaSource = mSurfaceMediaSource;
++        return BAD_VALUE;
+     } else {
+         return INVALID_OPERATION;
+     }
+     return OK;
+ }
+ 
+-// setupSurfaceMediaSource creates a source with the given
+-// width and height and framerate.
+-// TODO: This could go in a static function inside SurfaceMediaSource
+-// similar to that in CameraSource
+-status_t StagefrightRecorder::setupSurfaceMediaSource() {
+-    status_t err = OK;
+-    mSurfaceMediaSource = new SurfaceMediaSource(mVideoWidth, mVideoHeight);
+-    if (mSurfaceMediaSource == NULL) {
+-        return NO_INIT;
+-    }
+-
+-    if (mFrameRate == -1) {
+-        int32_t frameRate = 0;
+-        CHECK (mSurfaceMediaSource->getFormat()->findInt32(
+-                                        kKeyFrameRate, &frameRate));
+-        LOGI("Frame rate is not explicitly set. Use the current frame "
+-             "rate (%d fps)", frameRate);
+-        mFrameRate = frameRate;
+-    } else {
+-        err = mSurfaceMediaSource->setFrameRate(mFrameRate);
+-    }
+-    CHECK(mFrameRate != -1);
+-
+-    mIsMetaDataStoredInVideoBuffers =
+-        mSurfaceMediaSource->isMetaDataStoredInVideoBuffers();
+-    return err;
+-}
+-
+-status_t StagefrightRecorder::setupCameraSource(
+-        sp<CameraSource> *cameraSource) {
++status_t GonkRecorder::setupCameraSource(
++        sp<GonkCameraSource> *cameraSource) {
+     status_t err = OK;
+     if ((err = checkVideoEncoderCapabilities()) != OK) {
+         return err;
+@@ -1416,26 +1164,15 @@
+     Size videoSize;
+     videoSize.width = mVideoWidth;
+     videoSize.height = mVideoHeight;
+-    if (mCaptureTimeLapse) {
+-        mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
+-                mCamera, mCameraProxy, mCameraId,
+-                videoSize, mFrameRate, mPreviewSurface,
+-                mTimeBetweenTimeLapseFrameCaptureUs);
+-        *cameraSource = mCameraSourceTimeLapse;
+-    } else {
+-
+-        bool useMeta = true;
+-        char value[PROPERTY_VALUE_MAX];
+-        if (property_get("debug.camcorder.disablemeta", value, NULL) &&
++    bool useMeta = true;
++    char value[PROPERTY_VALUE_MAX];
++    if (property_get("debug.camcorder.disablemeta", value, NULL) &&
+             atoi(value)) {
+-            useMeta = false;
+-        }
+-        *cameraSource = CameraSource::CreateFromCamera(
+-                mCamera, mCameraProxy, mCameraId, videoSize, mFrameRate,
+-                mPreviewSurface, useMeta);
++        useMeta = false;
+     }
+-    mCamera.clear();
+-    mCameraProxy.clear();
++
++    *cameraSource = GonkCameraSource::Create(
++                mCameraHandle, videoSize, mFrameRate, useMeta);
+     if (*cameraSource == NULL) {
+         return UNKNOWN_ERROR;
+     }
+@@ -1465,7 +1202,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setupVideoEncoder(
++status_t GonkRecorder::setupVideoEncoder(
+         sp<MediaSource> cameraSource,
+         int32_t videoBitRate,
+         sp<MediaSource> *source) {
+@@ -1501,10 +1238,7 @@
+     CHECK(meta->findInt32(kKeyStride, &stride));
+     CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+     CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+-    hfr = 0;
+-    if (!meta->findInt32(kKeyHFR, &hfr)) {
+-        LOGW("hfr not found, default to 0");
+-    }
++    CHECK(meta->findInt32(kKeyHFR, &hfr));
+ 
+     if(hfr) {
+       mMaxFileDurationUs = mMaxFileDurationUs * (hfr/mFrameRate);
+@@ -1598,30 +1332,17 @@
+         enc_meta->setInt32(kKey3D, is3D);
+     }
+ 
+-    OMXClient client;
+-    CHECK_EQ(client.connect(), OK);
+-
+     uint32_t encoder_flags = 0;
+     if (mIsMetaDataStoredInVideoBuffers) {
+         LOGW("Camera source supports metadata mode, create OMXCodec for metadata");
+         encoder_flags |= OMXCodec::kHardwareCodecsOnly;
+         encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+-        if (property_get("ro.board.platform", value, "0")
+-            && (!strncmp(value, "msm7627", sizeof("msm7627") - 1))) {
+-            LOGW("msm7627 family of chipsets supports, only one buffer at a time");
+-            encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
+-        }
+-    }
+-
+-    // Do not wait for all the input buffers to become available.
+-    // This give timelapse video recording faster response in
+-    // receiving output from video encoder component.
+-    if (mCaptureTimeLapse) {
+         encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
+     }
+ 
+     sp<MediaSource> encoder = OMXCodec::Create(
+-            client.interface(), enc_meta,
++            GetOMX(),
++            enc_meta,
+             true /* createEncoder */, cameraSource,
+             NULL, encoder_flags);
+     if (encoder == NULL) {
+@@ -1638,7 +1359,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
++status_t GonkRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
+     status_t status = BAD_VALUE;
+     if (OK != (status = checkAudioEncoderCapabilities())) {
+         return status;
+@@ -1664,7 +1385,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::setupMPEG4Recording(
++status_t GonkRecorder::setupMPEG4Recording(
+         int outputFd,
+         int32_t videoWidth, int32_t videoHeight,
+         int32_t videoBitRate,
+@@ -1696,7 +1417,7 @@
+     // Audio source is added at the end if it exists.
+     // This help make sure that the "recoding" sound is suppressed for
+     // camcorder applications in the recorded files.
+-    if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_CNT)) {
++    if (mAudioSource != AUDIO_SOURCE_CNT) {
+         err = setupAudioEncoder(writer);
+         if (err != OK) return err;
+         *totalBitRate += mAudioBitRate;
+@@ -1728,7 +1449,7 @@
+     return OK;
+ }
+ 
+-void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
++void GonkRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+         sp<MetaData> *meta) {
+     (*meta)->setInt64(kKeyTime, startTimeUs);
+     (*meta)->setInt32(kKeyFileType, mOutputFormat);
+@@ -1752,7 +1473,7 @@
+     }
+ }
+ 
+-status_t StagefrightRecorder::startMPEG4Recording() {
++status_t GonkRecorder::startMPEG4Recording() {
+     int32_t totalBitRate;
+     status_t err = setupMPEG4Recording(
+             mOutputFd, mVideoWidth, mVideoHeight,
+@@ -1761,7 +1482,14 @@
+         return err;
+     }
+ 
+-    int64_t startTimeUs = systemTime() / 1000;
++    // systemTime() doesn't return the correct time here because
++    // HAVE_POSIX_CLOCKS is not defined when utils/Timers.cpp is built,
++    // so use clock_gettime() directly instead.
++#include <time.h>
++    struct timespec t;
++    clock_gettime(CLOCK_MONOTONIC, &t);
++    int64_t startTimeUs = int64_t(t.tv_sec)*1000000000LL + t.tv_nsec;
++    startTimeUs = startTimeUs / 1000;
+     sp<MetaData> meta = new MetaData;
+     setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
+ 
+@@ -1773,7 +1501,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::pause() {
++status_t GonkRecorder::pause() {
+     LOGV("pause");
+     if (mWriter == NULL) {
+         return UNKNOWN_ERROR;
+@@ -1782,31 +1510,16 @@
+ 
+     if (mStarted) {
+         mStarted = false;
+-
+-        uint32_t params = 0;
+-        if (mAudioSource != AUDIO_SOURCE_CNT) {
+-            params |= IMediaPlayerService::kBatteryDataTrackAudio;
+-        }
+-        if (mVideoSource != VIDEO_SOURCE_LIST_END) {
+-            params |= IMediaPlayerService::kBatteryDataTrackVideo;
+-        }
+-
+-        addBatteryData(params);
+     }
+ 
+ 
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::stop() {
++status_t GonkRecorder::stop() {
+     LOGV("stop");
+     status_t err = OK;
+ 
+-    if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) {
+-        mCameraSourceTimeLapse->startQuickReadReturns();
+-        mCameraSourceTimeLapse = NULL;
+-    }
+-
+     if (mWriter != NULL) {
+         err = mWriter->stop();
+         mWriter.clear();
+@@ -1819,30 +1532,20 @@
+ 
+     if (mStarted) {
+         mStarted = false;
+-
+-        uint32_t params = 0;
+-        if (mAudioSource != AUDIO_SOURCE_CNT) {
+-            params |= IMediaPlayerService::kBatteryDataTrackAudio;
+-        }
+-        if (mVideoSource != VIDEO_SOURCE_LIST_END) {
+-            params |= IMediaPlayerService::kBatteryDataTrackVideo;
+-        }
+-
+-        addBatteryData(params);
+     }
+ 
+ 
+     return err;
+ }
+ 
+-status_t StagefrightRecorder::close() {
++status_t GonkRecorder::close() {
+     LOGV("close");
+     stop();
+ 
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::reset() {
++status_t GonkRecorder::reset() {
+     LOGV("reset");
+     stop();
+ 
+@@ -1858,9 +1561,9 @@
+     mVideoHeight   = 144;
+     mFrameRate     = -1;
+     mVideoBitRate  = 192000;
+-    mSampleRate    = 0;
+-    mAudioChannels = 0;
+-    mAudioBitRate  = 0;
++    mSampleRate    = 8000;
++    mAudioChannels = 1;
++    mAudioBitRate  = 12200;
+     mInterleaveDurationUs = 0;
+     mIFramesIntervalSec = 2;
+     mAudioSourceNode = 0;
+@@ -1875,9 +1578,6 @@
+     mMaxFileDurationUs = 0;
+     mMaxFileSizeBytes = 0;
+     mTrackEveryTimeDurationUs = 0;
+-    mCaptureTimeLapse = false;
+-    mTimeBetweenTimeLapseFrameCaptureUs = -1;
+-    mCameraSourceTimeLapse = NULL;
+     mIsMetaDataStoredInVideoBuffers = false;
+     mEncoderProfiles = MediaProfiles::getInstance();
+     mRotationDegrees = 0;
+@@ -1885,6 +1585,11 @@
+     mLongitudex10000 = -3600000;
+ 
+     mOutputFd = -1;
++    mCameraHandle = -1;
++    // TODO: may need to register a listener eventually if someone is
++    // interested in recorder events; for now, default to no listener
++    // registered.
++    mListener = NULL;
+ 
+     // Disable Audio Encoding
+     char value[PROPERTY_VALUE_MAX];
+@@ -1894,7 +1599,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::getMaxAmplitude(int *max) {
++status_t GonkRecorder::getMaxAmplitude(int *max) {
+     LOGV("getMaxAmplitude");
+ 
+     if (max == NULL) {
+@@ -1911,7 +1616,7 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::dump(
++status_t GonkRecorder::dump(
+         int fd, const Vector<String16>& args) const {
+     LOGV("dump");
+     const size_t SIZE = 256;
+@@ -1958,6 +1663,8 @@
+     result.append(buffer);
+     snprintf(buffer, SIZE, "     Camera Id: %d\n", mCameraId);
+     result.append(buffer);
++    snprintf(buffer, SIZE, "     Camera Handle: %d\n", mCameraHandle);
++    result.append(buffer);
+     snprintf(buffer, SIZE, "     Start time offset (ms): %d\n", mStartTimeOffsetMs);
+     result.append(buffer);
+     snprintf(buffer, SIZE, "     Encoder: %d\n", mVideoEncoder);
+@@ -1978,45 +1685,12 @@
+     return OK;
+ }
+ 
+-status_t StagefrightRecorder::startExtendedRecording() {
+-    CHECK(mOutputFormat == OUTPUT_FORMAT_QCP);
+-
+-    if (mSampleRate != 8000) {
+-        LOGE("Invalid sampling rate %d used for recording",
+-             mSampleRate);
+-        return BAD_VALUE;
+-    }
+-    if (mAudioChannels != 1) {
+-        LOGE("Invalid number of audio channels %d used for recording",
+-                mAudioChannels);
+-        return BAD_VALUE;
+-    }
+-
+-    if (mAudioSource >= AUDIO_SOURCE_CNT) {
+-        LOGE("Invalid audio source: %d", mAudioSource);
+-        return BAD_VALUE;
+-    }
+-
+-    sp<MediaSource> audioEncoder = createAudioSource();
+-
+-    if (audioEncoder == NULL) {
+-        LOGE("AudioEncoder NULL");
+-        return UNKNOWN_ERROR;
+-    }
+-
+-    mWriter = new ExtendedWriter(dup(mOutputFd));
+-    mWriter->addSource(audioEncoder);
+-
+-    if (mMaxFileDurationUs != 0) {
+-        mWriter->setMaxFileDuration(mMaxFileDurationUs);
+-    }
+-    if (mMaxFileSizeBytes != 0) {
+-        mWriter->setMaxFileSize(mMaxFileSizeBytes);
+-    }
+-    mWriter->setListener(mListener);
+-    mWriter->start();
+-
+-    return OK;
++status_t GonkRecorder::setCameraHandle(int32_t handle) {
++  if (handle < 0) {
++    return BAD_VALUE;
++  }
++  mCameraHandle = handle;
++  return OK;
+ }
+ 
+ }  // namespace android
+diff --git a/GonkRecorder.h b/GonkRecorder.h
+index dba6110..fa948af 100644
+--- a/GonkRecorder.h
++++ b/GonkRecorder.h
+@@ -14,11 +14,11 @@
+  * limitations under the License.
+  */
+ 
+-#ifndef STAGEFRIGHT_RECORDER_H_
++#ifndef GONK_RECORDER_H_
+ 
+-#define STAGEFRIGHT_RECORDER_H_
++#define GONK_RECORDER_H_
+ 
+-#include <media/MediaRecorderBase.h>
++#include <media/mediarecorder.h>
+ #include <camera/CameraParameters.h>
+ #include <utils/String8.h>
+ 
+@@ -26,21 +26,16 @@
+ 
+ namespace android {
+ 
+-class Camera;
+-class ICameraRecordingProxy;
+-class CameraSource;
+-class CameraSourceTimeLapse;
++class GonkCameraSource;
+ struct MediaSource;
+ struct MediaWriter;
+ class MetaData;
+ struct AudioSource;
+ class MediaProfiles;
+-class ISurfaceTexture;
+-class SurfaceMediaSource;
+ 
+-struct StagefrightRecorder : public MediaRecorderBase {
+-    StagefrightRecorder();
+-    virtual ~StagefrightRecorder();
++struct GonkRecorder {
++    GonkRecorder();
++    virtual ~GonkRecorder();
+ 
+     virtual status_t init();
+     virtual status_t setAudioSource(audio_source_t as);
+@@ -50,11 +45,10 @@
+     virtual status_t setVideoEncoder(video_encoder ve);
+     virtual status_t setVideoSize(int width, int height);
+     virtual status_t setVideoFrameRate(int frames_per_second);
+-    virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy);
+-    virtual status_t setPreviewSurface(const sp<Surface>& surface);
+     virtual status_t setOutputFile(const char *path);
+     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+     virtual status_t setParameters(const String8& params);
++    virtual status_t setCameraHandle(int32_t handle);
+     virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
+     virtual status_t prepare();
+     virtual status_t start();
+@@ -65,12 +59,8 @@
+     virtual status_t getMaxAmplitude(int *max);
+     virtual status_t dump(int fd, const Vector<String16>& args) const;
+     // Querying a SurfaceMediaSourcer
+-    virtual sp<ISurfaceTexture> querySurfaceMediaSource() const;
+ 
+ private:
+-    sp<ICamera> mCamera;
+-    sp<ICameraRecordingProxy> mCameraProxy;
+-    sp<Surface> mPreviewSurface;
+     sp<IMediaRecorderClient> mListener;
+     sp<MediaWriter> mWriter;
+     int mOutputFd;
+@@ -104,11 +94,6 @@
+     int32_t mLongitudex10000;
+     int32_t mStartTimeOffsetMs;
+ 
+-    bool mCaptureTimeLapse;
+-    int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+-    sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
+-
+-
+     String8 mParams;
+ 
+     bool mIsMetaDataStoredInVideoBuffers;
+@@ -119,8 +104,8 @@
+     // An <ISurfaceTexture> pointer
+     // will be sent to the client side using which the
+     // frame buffers will be queued and dequeued
+-    sp<SurfaceMediaSource> mSurfaceMediaSource;
+     bool mDisableAudio;
++    int32_t mCameraHandle;
+ 
+     status_t setupMPEG4Recording(
+         int outputFd,
+@@ -132,10 +117,7 @@
+         sp<MetaData> *meta);
+     status_t startMPEG4Recording();
+     status_t startAMRRecording();
+-    status_t startFMA2DPWriter();
+-    status_t startAACRecording();
+     status_t startRawAudioRecording();
+-    status_t startRTPRecording();
+     status_t startMPEG2TSRecording();
+     sp<MediaSource> createAudioSource();
+     status_t checkVideoEncoderCapabilities();
+@@ -144,9 +126,8 @@
+     // source (CameraSource or SurfaceMediaSource)
+     // depending on the videosource type
+     status_t setupMediaSource(sp<MediaSource> *mediaSource);
+-    status_t setupCameraSource(sp<CameraSource> *cameraSource);
++    status_t setupCameraSource(sp<GonkCameraSource> *cameraSource);
+     // setup the surfacemediasource for the encoder
+-    status_t setupSurfaceMediaSource();
+ 
+     status_t setupAudioEncoder(const sp<MediaWriter>& writer);
+     status_t setupVideoEncoder(
+@@ -160,8 +141,6 @@
+     status_t setParamAudioNumberOfChannels(int32_t channles);
+     status_t setParamAudioSamplingRate(int32_t sampleRate);
+     status_t setParamAudioTimeScale(int32_t timeScale);
+-    status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
+-    status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
+     status_t setParamVideoEncodingBitRate(int32_t bitRate);
+     status_t setParamVideoIFramesInterval(int32_t seconds);
+     status_t setParamVideoEncoderProfile(int32_t profile);
+@@ -186,14 +165,10 @@
+     void clipNumberOfAudioChannels();
+     void setDefaultProfileIfNecessary();
+ 
+-
+-    StagefrightRecorder(const StagefrightRecorder &);
+-    StagefrightRecorder &operator=(const StagefrightRecorder &);
+-
+-    /* extension */
+-    status_t startExtendedRecording();
++    GonkRecorder(const GonkRecorder &);
++    GonkRecorder &operator=(const GonkRecorder &);
+ };
+ 
+ }  // namespace android
+ 
+-#endif  // STAGEFRIGHT_RECORDER_H_
++#endif  // GONK_RECORDER_H_
new file mode 100644
--- /dev/null
+++ b/dom/camera/update.sh
@@ -0,0 +1,14 @@
+# Usage: ./update.sh <android_ics_os_src_directory>
+#
+# Copies the needed files from the directory containing the original
+# Android ICS OS source and applies the B2G-specific changes needed
+# for the camcorder functionality.
+cp "$1"/frameworks/base/media/libmediaplayerservice/StagefrightRecorder.cpp ./GonkRecorder.cpp
+cp "$1"/frameworks/base/media/libmediaplayerservice/StagefrightRecorder.h ./GonkRecorder.h
+cp "$1"/frameworks/base/media/libstagefright/CameraSource.cpp ./GonkCameraSource.cpp
+cp "$1"/frameworks/base/include/media/stagefright/CameraSource.h ./GonkCameraSource.h
+cp "$1"/frameworks/base/media/libmedia/AudioParameter.cpp ./AudioParameter.cpp
+cp "$1"/frameworks/base/include/camera/Camera.h ./GonkCameraListener.h
+patch -p1 <update.patch
+# If you import CAF sources, you also need to apply update2.patch
+patch -p1 <update2.patch
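For reference, a typical invocation might look like ./update.sh ~/android-ics, where ~/android-ics is a hypothetical path to a synced Android ICS source tree; the copied files are then rewritten in place by update.patch and, for CAF-based trees, by update2.patch as well.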
new file mode 100644
--- /dev/null
+++ b/dom/camera/update2.patch
@@ -0,0 +1,163 @@
+diff --git a/dom/camera/GonkCameraSource.cpp b/dom/camera/GonkCameraSource.cpp
+--- a/dom/camera/GonkCameraSource.cpp
++++ b/dom/camera/GonkCameraSource.cpp
+@@ -492,21 +492,17 @@ status_t GonkCameraSource::init(
+     mMeta = new MetaData;
+     mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
+     mMeta->setInt32(kKeyColorFormat, mColorFormat);
+     mMeta->setInt32(kKeyWidth,       mVideoSize.width);
+     mMeta->setInt32(kKeyHeight,      mVideoSize.height);
+     mMeta->setInt32(kKeyStride,      mVideoSize.width);
+     mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+     mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
+-    mMeta->setInt32(kKeyHFR, hfr);
+ 
+-    if (want3D) {
+-        mMeta->setInt32(kKey3D, !0);
+-    }
+     return OK;
+ }
+ 
+ GonkCameraSource::~GonkCameraSource() {
+     if (mStarted) {
+         stop();
+     } else if (mInitCheck == OK) {
+         // Camera is initialized but because start() is never called,
+diff --git a/dom/camera/GonkRecorder.cpp b/dom/camera/GonkRecorder.cpp
+--- a/dom/camera/GonkRecorder.cpp
++++ b/dom/camera/GonkRecorder.cpp
+@@ -716,56 +716,16 @@ status_t GonkRecorder::start() {
+         mStarted = true;
+     }
+ 
+     return status;
+ }
+ 
+ sp<MediaSource> GonkRecorder::createAudioSource() {
+ 
+-    bool tunneledSource = false;
+-    const char *tunnelMime;
+-    {
+-        AudioParameter param;
+-        String8 key("tunneled-input-formats");
+-        param.add( key, String8("get") );
+-        String8 valueStr = AudioSystem::getParameters( 0, param.toString());
+-        AudioParameter result(valueStr);
+-        int value;
+-        if ( mAudioEncoder == AUDIO_ENCODER_AMR_NB &&
+-            result.getInt(String8("AMR"),value) == NO_ERROR ) {
+-            tunneledSource = true;
+-            tunnelMime = MEDIA_MIMETYPE_AUDIO_AMR_NB;
+-        }
+-        else if ( mAudioEncoder == AUDIO_ENCODER_QCELP &&
+-            result.getInt(String8("QCELP"),value) == NO_ERROR ) {
+-            tunneledSource = true;
+-            tunnelMime = MEDIA_MIMETYPE_AUDIO_QCELP;
+-        }
+-        else if ( mAudioEncoder == AUDIO_ENCODER_EVRC &&
+-            result.getInt(String8("EVRC"),value) == NO_ERROR ) {
+-            tunneledSource = true;
+-            tunnelMime = MEDIA_MIMETYPE_AUDIO_EVRC;
+-        }
+-    }
+-
+-    if ( tunneledSource ) {
+-        sp<AudioSource> audioSource = NULL;
+-        sp<MetaData> meta = new MetaData;
+-        meta->setInt32(kKeyChannelCount, mAudioChannels);
+-        meta->setInt32(kKeySampleRate, mSampleRate);
+-        meta->setInt32(kKeyBitRate, mAudioBitRate);
+-        if (mAudioTimeScale > 0) {
+-            meta->setInt32(kKeyTimeScale, mAudioTimeScale);
+-        }
+-        meta->setCString( kKeyMIMEType, tunnelMime );
+-        audioSource = new AudioSource( mAudioSource, meta);
+-        return audioSource->initCheck( ) == OK ? audioSource : NULL;
+-    }
+-
+     sp<AudioSource> audioSource =
+         new AudioSource(
+                 mAudioSource,
+                 mSampleRate,
+                 mAudioChannels);
+ 
+     status_t err = audioSource->initCheck();
+ 
+@@ -1226,56 +1186,33 @@ status_t GonkRecorder::setupVideoEncoder
+ 
+         default:
+             CHECK(!"Should not be here, unsupported video encoding.");
+             break;
+     }
+ 
+     sp<MetaData> meta = cameraSource->getFormat();
+ 
+-    int32_t width, height, stride, sliceHeight, colorFormat, hfr, is3D;
++    int32_t width, height, stride, sliceHeight, colorFormat;
+     CHECK(meta->findInt32(kKeyWidth, &width));
+     CHECK(meta->findInt32(kKeyHeight, &height));
+     CHECK(meta->findInt32(kKeyStride, &stride));
+     CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+     CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+-    CHECK(meta->findInt32(kKeyHFR, &hfr));
+-
+-    if(hfr) {
+-      mMaxFileDurationUs = mMaxFileDurationUs * (hfr/mFrameRate);
+-    }
+-
+ 
+     enc_meta->setInt32(kKeyWidth, width);
+     enc_meta->setInt32(kKeyHeight, height);
+     enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
+     enc_meta->setInt32(kKeyStride, stride);
+     enc_meta->setInt32(kKeySliceHeight, sliceHeight);
+     enc_meta->setInt32(kKeyColorFormat, colorFormat);
+-    enc_meta->setInt32(kKeyHFR, hfr);
+     if (mVideoTimeScale > 0) {
+         enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
+     }
+ 
+-    char mDeviceName[100];
+-    property_get("ro.board.platform",mDeviceName,"0");
+-    if(!strncmp(mDeviceName, "msm7627a", 8)) {
+-      if(hfr && (width * height > 432*240)) {
+-        LOGE("HFR mode is supported only upto WQVGA resolution");
+-        return INVALID_OPERATION;
+-      }
+-    }
+-    else {
+-      if(hfr && ((mVideoEncoder != VIDEO_ENCODER_H264) || (width * height > 800*480))) {
+-        LOGE("HFR mode is supported only upto WVGA and H264 codec.");
+-        return INVALID_OPERATION;
+-      }
+-    }
+-
+-
+     /*
+      * can set profile from the app as a parameter.
+      * For the mean time, set from shell
+      */
+ 
+     char value[PROPERTY_VALUE_MAX];
+     bool customProfile = false;
+ 
+@@ -1322,19 +1259,16 @@ status_t GonkRecorder::setupVideoEncoder
+     }
+ 
+     if (mVideoEncoderProfile != -1) {
+         enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
+     }
+     if (mVideoEncoderLevel != -1) {
+         enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+     }
+-    if (meta->findInt32(kKey3D, &is3D)) {
+-        enc_meta->setInt32(kKey3D, is3D);
+-    }
+ 
+     uint32_t encoder_flags = 0;
+     if (mIsMetaDataStoredInVideoBuffers) {
+         LOGW("Camera source supports metadata mode, create OMXCodec for metadata");
+         encoder_flags |= OMXCodec::kHardwareCodecsOnly;
+         encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+         encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
+     }
--- a/dom/devicestorage/DeviceStorageRequestChild.cpp
+++ b/dom/devicestorage/DeviceStorageRequestChild.cpp
@@ -50,17 +50,18 @@ DeviceStorageRequestChild::Recv__delete_
     }
 
     case DeviceStorageResponseValue::TBlobResponse:
     {
       BlobResponse r = aValue;
       BlobChild* actor = static_cast<BlobChild*>(r.blobChild());
       nsCOMPtr<nsIDOMBlob> blob = actor->GetBlob();
 
-      jsval result = InterfaceToJsval(mRequest->GetOwner(), blob, &NS_GET_IID(nsIDOMBlob));
+      nsCOMPtr<nsIDOMFile> file = do_QueryInterface(blob);
+      jsval result = InterfaceToJsval(mRequest->GetOwner(), file, &NS_GET_IID(nsIDOMFile));
       mRequest->FireSuccess(result);
       break;
     }
 
     case DeviceStorageResponseValue::TStatStorageResponse:
     {
       StatStorageResponse r = aValue;
 
--- a/dom/devicestorage/nsDeviceStorage.cpp
+++ b/dom/devicestorage/nsDeviceStorage.cpp
@@ -1909,16 +1909,26 @@ nsDOMDeviceStorage::Stat(nsIDOMDOMReques
                                                      mPrincipal,
                                                      dsf,
                                                      request);
   NS_DispatchToMainThread(r);
   return NS_OK;
 }
 
 NS_IMETHODIMP
+nsDOMDeviceStorage::GetRootDirectory(nsIFile** aRootDirectory)
+{
+  if (!mRootDirectory) {
+    return NS_ERROR_FAILURE;
+  }
+
+  return mRootDirectory->Clone(aRootDirectory);
+}
+
+NS_IMETHODIMP
 nsDOMDeviceStorage::Enumerate(const JS::Value & aName,
                              const JS::Value & aOptions,
                              JSContext* aCx,
                              uint8_t aArgc,
                              nsIDOMDeviceStorageCursor** aRetval)
 {
   return EnumerateInternal(aName, aOptions, aCx, aArgc, false, aRetval);
 }
--- a/dom/interfaces/devicestorage/nsIDOMDeviceStorage.idl
+++ b/dom/interfaces/devicestorage/nsIDOMDeviceStorage.idl
@@ -4,23 +4,24 @@
 
 #include "domstubs.idl"
 #include "nsIDOMEventTarget.idl"
 interface nsIDOMBlob;
 interface nsIDOMDOMRequest;
 interface nsIDOMDeviceStorageCursor;
 interface nsIDOMDeviceStorageChangeEvent;
 interface nsIDOMEventListener;
+interface nsIFile;
 
 dictionary DeviceStorageEnumerationParameters
 {
   jsval since;
 };
 
-[scriptable, uuid(7efbe025-3a8a-4151-9257-3e8c941dc099), builtinclass]
+[scriptable, uuid(7f69936f-2948-4733-ba41-c7e1d657a88b), builtinclass]
 interface nsIDOMDeviceStorage : nsIDOMEventTarget
 {
     [implicit_jscontext] attribute jsval onchange;
     nsIDOMDOMRequest add(in nsIDOMBlob aBlob);
     nsIDOMDOMRequest addNamed(in nsIDOMBlob aBlob, in DOMString aName);
 
     [implicit_jscontext]
     nsIDOMDOMRequest get(in jsval aName);
@@ -33,9 +34,11 @@ interface nsIDOMDeviceStorage : nsIDOMEv
 
     [optional_argc, implicit_jscontext]
     nsIDOMDeviceStorageCursor enumerate([optional] in jsval aName, /* DeviceStorageEnumerationParameters */ [optional] in jsval options);
 
     [optional_argc, implicit_jscontext]
     nsIDOMDeviceStorageCursor enumerateEditable([optional] in jsval aName, /* DeviceStorageEnumerationParameters */ [optional] in jsval options);
 
     nsIDOMDOMRequest stat();
+
+    [noscript] readonly attribute nsIFile rootDirectory;
 };
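Because rootDirectory is marked [noscript], it is reachable only from C++, where the attribute surfaces as a GetRootDirectory() getter. A minimal caller sketch, assuming aStorage already holds a valid nsIDOMDeviceStorage (the variable name is hypothetical):

  // Sketch only: aStorage is an already-obtained nsIDOMDeviceStorage.
  nsCOMPtr<nsIFile> root;
  nsresult rv = aStorage->GetRootDirectory(getter_AddRefs(root));
  if (NS_SUCCEEDED(rv)) {
    // The getter clones mRootDirectory, so 'root' may be modified
    // (e.g. with Append()) without affecting the device storage object.
  }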
--- a/js/xpconnect/src/dictionary_helper_gen.conf
+++ b/js/xpconnect/src/dictionary_helper_gen.conf
@@ -16,17 +16,18 @@ dictionaries = [
      [ 'GeoPositionOptions', 'nsIDOMGeoGeolocation.idl' ],
      [ 'DOMFileMetadataParameters', 'nsIDOMLockedFile.idl' ],
      [ 'XMLHttpRequestParameters', 'nsIXMLHttpRequest.idl' ],
      [ 'DeviceStorageEnumerationParameters', 'nsIDOMDeviceStorage.idl' ],
      [ 'CameraSize', 'nsIDOMCameraManager.idl' ],
      [ 'CameraRegion', 'nsIDOMCameraManager.idl' ],
      [ 'CameraPosition', 'nsIDOMCameraManager.idl' ],
      [ 'CameraSelector', 'nsIDOMCameraManager.idl' ],
-     [ 'CameraPictureOptions', 'nsIDOMCameraManager.idl' ]
+     [ 'CameraPictureOptions', 'nsIDOMCameraManager.idl' ],
+     [ 'CameraRecordingOptions', 'nsIDOMCameraManager.idl' ]
    ]
 
 # include file names
 special_includes = [
     'nsContentUtils.h',
     'XPCQuickStubs.h',
     'nsIDOMApplicationRegistry.h'
   ]
--- a/toolkit/library/Makefile.in
+++ b/toolkit/library/Makefile.in
@@ -113,16 +113,20 @@ endif #}
 ifdef MOZ_B2G_BT #{
 STATIC_LIBS += mozdbus_s mozipcunixsocket_s
 endif #}
 
 ifeq (gonk,$(MOZ_WIDGET_TOOLKIT))
 STATIC_LIBS += moznetd_s
 endif
 
+ifdef MOZ_B2G_CAMERA #{
+OS_LIBS += -lstagefright -lstagefright_omx
+endif #}
+
 ifdef MOZ_IPDL_TESTS
 STATIC_LIBS += ipdlunittest_s
 endif
 
 ifeq (Linux,$(OS_ARCH))
 ifneq (Android,$(OS_TARGET))
 OS_LIBS += -lrt
 endif