Bug 965420 - camera face-tracking, r=bz,dhylands
author Mike Habicher <mikeh@mozilla.com>
Wed, 09 Apr 2014 11:52:10 -0400
changeset 189904 e6ac52f874d097648fcf91fd2db247af3a297d6b
parent 189903 3f481b3c3e7120b8c2a708b9b477f7180c23d356
child 189905 72a05425a854e7b482b61101a52ed3cb8551eb8f
push id 5832
push user raliiev@mozilla.com
push date Mon, 28 Apr 2014 18:43:22 +0000
treeherder mozilla-aurora@4974d9da2f7d
reviewers bz, dhylands
bugs 965420
milestone 31.0a1
Bug 965420 - camera face-tracking, r=bz,dhylands
dom/bindings/Bindings.conf
dom/camera/CameraControlImpl.cpp
dom/camera/CameraControlImpl.h
dom/camera/CameraControlListener.h
dom/camera/DOMCameraCapabilities.cpp
dom/camera/DOMCameraCapabilities.h
dom/camera/DOMCameraControl.cpp
dom/camera/DOMCameraControl.h
dom/camera/DOMCameraControlListener.cpp
dom/camera/DOMCameraControlListener.h
dom/camera/DOMCameraDetectedFace.cpp
dom/camera/DOMCameraDetectedFace.h
dom/camera/FallbackCameraCapabilities.cpp
dom/camera/FallbackCameraControl.cpp
dom/camera/GonkCameraControl.cpp
dom/camera/GonkCameraControl.h
dom/camera/GonkCameraHwMgr.cpp
dom/camera/GonkCameraHwMgr.h
dom/camera/GonkCameraParameters.cpp
dom/camera/ICameraControl.h
dom/camera/TestGonkCameraHardware.cpp
dom/camera/TestGonkCameraHardware.h
dom/camera/moz.build
dom/camera/test/mochitest.ini
dom/camera/test/test_camera_hardware_face_detection.html
dom/tests/mochitest/general/test_interfaces.html
dom/webidl/CameraCapabilities.webidl
dom/webidl/CameraControl.webidl
modules/libpref/src/init/all.js
xpcom/glue/nsCycleCollectionParticipant.h
--- a/dom/bindings/Bindings.conf
+++ b/dom/bindings/Bindings.conf
@@ -180,21 +180,31 @@ DOMInterfaces = {
 'CameraControl': {
     'nativeType': 'mozilla::nsDOMCameraControl',
     'headerFile': 'DOMCameraControl.h',
     'binaryNames': {
         "release": "ReleaseHardware"
     }
 },
 
+'CameraDetectedFace': {
+    'nativeType': 'mozilla::dom::DOMCameraDetectedFace',
+    'headerFile': 'DOMCameraDetectedFace.h'
+},
+
 'CameraManager': {
     'nativeType': 'nsDOMCameraManager',
     'headerFile': 'DOMCameraManager.h'
 },
 
+'CameraPoint': {
+    'nativeType': 'mozilla::dom::DOMCameraPoint',
+    'headerFile': 'DOMCameraDetectedFace.h'
+},
+
 'CanvasRenderingContext2D': {
     'implicitJSContext': [
         'createImageData', 'getImageData'
     ],
     'resultNotAddRefed': [ 'canvas', 'measureText' ],
     'binaryNames': {
         'mozImageSmoothingEnabled': 'imageSmoothingEnabled',
         'mozFillRule': 'fillRule'
--- a/dom/camera/CameraControlImpl.cpp
+++ b/dom/camera/CameraControlImpl.cpp
@@ -33,16 +33,27 @@ CameraControlImpl::CameraControlImpl(uin
     unused << rv; // swallow rv to suppress a compiler warning when the macro
                   // is #defined to nothing (i.e. in non-DEBUG builds).
     MOZ_ASSERT(NS_SUCCEEDED(rv));
 
     // keep a weak reference to the new thread
     sCameraThread = do_GetWeakReference(mCameraThread);
   }
 
+  // Care must be taken with the mListenerLock read-write lock to prevent
+  // deadlocks. Currently this is handled by ensuring that any attempts to
+  // acquire the lock for writing (as in Add/RemoveListener()) happen in a
+  // runnable dispatched to the Camera Thread--even if the method is being
+  // called from that thread. This ensures that if a registered listener
+  // (which is invoked with a read-lock) tries to call Add/RemoveListener(),
+  // the lock-for-writing attempt won't happen until the listener has
+  // completed.
+  //
+  // Multiple parallel listeners being invoked are not a problem because
+  // the read-write lock allows multiple simultaneous read-locks.
   mListenerLock = PR_NewRWLock(PR_RWLOCK_RANK_NONE, "CameraControlImpl.Listeners.Lock");
 }
 
 CameraControlImpl::~CameraControlImpl()
 {
   if (mListenerLock) {
     PR_DestroyRWLock(mListenerLock);
     mListenerLock = nullptr;
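
A minimal sketch (not part of the patch) of the deadlock the comment above guards against, using NSPR's PRRWLock directly; the free-function names here are hypothetical:

#include "prrwlock.h"

PRRWLock* gListenerLock = PR_NewRWLock(PR_RWLOCK_RANK_NONE, "example.lock");

void InvokedListener()               // runs while the caller holds the read-lock
{
  // PR_RWLock_Wlock(gListenerLock); // would deadlock: NSPR rwlocks are not
  //                                 // recursive, so the writer waits forever
  //                                 // for our own outstanding read-lock.
  // The patch instead dispatches Add/RemoveListener() work to the Camera
  // Thread; that runnable only takes the write-lock after this callback
  // returns and the read-lock is released.
}

void NotifyListeners()
{
  PR_RWLock_Rlock(gListenerLock);    // multiple readers may hold this at once
  InvokedListener();
  PR_RWLock_Unlock(gListenerLock);
}
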
@@ -127,16 +138,30 @@ CameraControlImpl::OnAutoFocusMoving(boo
 
   for (uint32_t i = 0; i < mListeners.Length(); ++i) {
     CameraControlListener* l = mListeners[i];
     l->OnAutoFocusMoving(aIsMoving);
   }
 }
 
 void
+CameraControlImpl::OnFacesDetected(const nsTArray<Face>& aFaces)
+{
+  // This callback can run on threads other than the Main Thread and
+  //  the Camera Thread. On Gonk, it is called from the camera
+  //  library's face detection thread.
+  RwLockAutoEnterRead lock(mListenerLock);
+
+  for (uint32_t i = 0; i < mListeners.Length(); ++i) {
+    CameraControlListener* l = mListeners[i];
+    l->OnFacesDetected(aFaces);
+  }
+}
+
+void
 CameraControlImpl::OnTakePictureComplete(uint8_t* aData, uint32_t aLength, const nsAString& aMimeType)
 {
   // This callback can run on threads other than the Main Thread and
   //  the Camera Thread. On Gonk, it is called from the camera
   //  library's snapshot thread.
   RwLockAutoEnterRead lock(mListenerLock);
 
   for (uint32_t i = 0; i < mListeners.Length(); ++i) {
@@ -411,16 +436,60 @@ CameraControlImpl::AutoFocus(bool aCance
     bool mCancelExistingCall;
   };
 
   return mCameraThread->Dispatch(
     new Message(this, CameraControlListener::kInAutoFocus, aCancelExistingCall), NS_DISPATCH_NORMAL);
 }
 
 nsresult
+CameraControlImpl::StartFaceDetection()
+{
+  class Message : public ControlMessage
+  {
+  public:
+    Message(CameraControlImpl* aCameraControl,
+            CameraControlListener::CameraErrorContext aContext)
+      : ControlMessage(aCameraControl, aContext)
+    { }
+
+    nsresult
+    RunImpl() MOZ_OVERRIDE
+    {
+      return mCameraControl->StartFaceDetectionImpl();
+    }
+  };
+
+  return mCameraThread->Dispatch(
+    new Message(this, CameraControlListener::kInStartFaceDetection), NS_DISPATCH_NORMAL);
+}
+
+nsresult
+CameraControlImpl::StopFaceDetection()
+{
+  class Message : public ControlMessage
+  {
+  public:
+    Message(CameraControlImpl* aCameraControl,
+            CameraControlListener::CameraErrorContext aContext)
+      : ControlMessage(aCameraControl, aContext)
+    { }
+
+    nsresult
+    RunImpl() MOZ_OVERRIDE
+    {
+      return mCameraControl->StopFaceDetectionImpl();
+    }
+  };
+
+  return mCameraThread->Dispatch(
+    new Message(this, CameraControlListener::kInStopFaceDetection), NS_DISPATCH_NORMAL);
+}
+
+nsresult
 CameraControlImpl::TakePicture()
 {
   class Message : public ControlMessage
   {
   public:
     Message(CameraControlImpl* aCameraControl,
             CameraControlListener::CameraErrorContext aContext)
       : ControlMessage(aCameraControl, aContext)
--- a/dom/camera/CameraControlImpl.h
+++ b/dom/camera/CameraControlImpl.h
@@ -38,16 +38,18 @@ public:
 
   virtual nsresult Start(const Configuration* aConfig = nullptr) MOZ_OVERRIDE;
   virtual nsresult Stop() MOZ_OVERRIDE;
 
   virtual nsresult SetConfiguration(const Configuration& aConfig) MOZ_OVERRIDE;
   virtual nsresult StartPreview() MOZ_OVERRIDE;
   virtual nsresult StopPreview() MOZ_OVERRIDE;
   virtual nsresult AutoFocus(bool aCancelExistingCall) MOZ_OVERRIDE;
+  virtual nsresult StartFaceDetection() MOZ_OVERRIDE;
+  virtual nsresult StopFaceDetection() MOZ_OVERRIDE;
   virtual nsresult TakePicture() MOZ_OVERRIDE;
   virtual nsresult StartRecording(DeviceStorageFileDescriptor* aFileDescriptor,
                                   const StartRecordingOptions* aOptions) MOZ_OVERRIDE;
   virtual nsresult StopRecording() MOZ_OVERRIDE;
 
   already_AddRefed<RecorderProfileManager> GetRecorderProfileManager();
   uint32_t GetCameraId() { return mCameraId; }
 
@@ -58,16 +60,17 @@ public:
   void OnClosed();
   void OnError(CameraControlListener::CameraErrorContext aContext,
                CameraControlListener::CameraError aError);
   void OnAutoFocusMoving(bool aIsMoving);
 
 protected:
   // Event handlers.
   void OnAutoFocusComplete(bool aAutoFocusSucceeded);
+  void OnFacesDetected(const nsTArray<Face>& aFaces);
   void OnTakePictureComplete(uint8_t* aData, uint32_t aLength, const nsAString& aMimeType);
 
   bool OnNewPreviewFrame(layers::Image* aImage, uint32_t aWidth, uint32_t aHeight);
   void OnRecorderStateChange(CameraControlListener::RecorderState aState,
                              int32_t aStatus = -1, int32_t aTrackNumber = -1);
   void OnPreviewStateChange(CameraControlListener::PreviewState aState);
   void OnHardwareStateChange(CameraControlListener::HardwareState aState);
   void OnConfigurationChange();
@@ -95,16 +98,18 @@ protected:
   class ListenerMessage;
 
   virtual nsresult StartImpl(const Configuration* aConfig = nullptr) = 0;
   virtual nsresult StopImpl() = 0;
   virtual nsresult SetConfigurationImpl(const Configuration& aConfig) = 0;
   virtual nsresult StartPreviewImpl() = 0;
   virtual nsresult StopPreviewImpl() = 0;
   virtual nsresult AutoFocusImpl(bool aCancelExistingCall) = 0;
+  virtual nsresult StartFaceDetectionImpl() = 0;
+  virtual nsresult StopFaceDetectionImpl() = 0;
   virtual nsresult TakePictureImpl() = 0;
   virtual nsresult StartRecordingImpl(DeviceStorageFileDescriptor* aFileDescriptor,
                                       const StartRecordingOptions* aOptions) = 0;
   virtual nsresult StopRecordingImpl() = 0;
   virtual nsresult PushParametersImpl() = 0;
   virtual nsresult PullParametersImpl() = 0;
   virtual already_AddRefed<RecorderProfileManager> GetRecorderProfileManagerImpl() = 0;
 
--- a/dom/camera/CameraControlListener.h
+++ b/dom/camera/CameraControlListener.h
@@ -75,22 +75,25 @@ public:
     uint32_t mMaxMeteringAreas;
     uint32_t mMaxFocusAreas;
   };
   virtual void OnConfigurationChange(const CameraListenerConfiguration& aConfiguration) { }
 
   virtual void OnAutoFocusComplete(bool aAutoFocusSucceeded) { }
   virtual void OnAutoFocusMoving(bool aIsMoving) { }
   virtual void OnTakePictureComplete(uint8_t* aData, uint32_t aLength, const nsAString& aMimeType) { }
+  virtual void OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces) { }
 
   enum CameraErrorContext
   {
     kInStartCamera,
     kInStopCamera,
     kInAutoFocus,
+    kInStartFaceDetection,
+    kInStopFaceDetection,
     kInTakePicture,
     kInStartRecording,
     kInStopRecording,
     kInSetConfiguration,
     kInStartPreview,
     kInStopPreview,
     kInUnspecified
   };
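
To illustrate the new listener hook (not part of the patch), a minimal subclass that only logs faces; the class name and printf body are hypothetical, and it assumes CameraControlListener and ICameraControl live in the mozilla namespace:

#include <cstdio>
#include "CameraControlListener.h"   // assumed to pull in ICameraControl::Face

class FaceLoggingListener : public mozilla::CameraControlListener
{
public:
  virtual void
  OnFacesDetected(const nsTArray<mozilla::ICameraControl::Face>& aFaces) MOZ_OVERRIDE
  {
    for (uint32_t i = 0; i < aFaces.Length(); ++i) {
      const mozilla::ICameraControl::Face& f = aFaces[i];
      printf("face id=%u score=%u bound=(%d,%d)-(%d,%d)\n",
             f.id, f.score, f.bound.left, f.bound.top,
             f.bound.right, f.bound.bottom);
    }
  }
};

A listener like this would be registered via ICameraControl::AddListener() and invoked on whatever thread the backend reports faces from, per the CameraControlImpl comment earlier.
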
--- a/dom/camera/DOMCameraCapabilities.cpp
+++ b/dom/camera/DOMCameraCapabilities.cpp
@@ -144,16 +144,21 @@ CameraCapabilities::Populate(ICameraCont
   rv = aCameraControl->Get(CAMERA_PARAM_SUPPORTED_MAXFOCUSAREAS, areas);
   LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_MAXFOCUSAREAS);
   mMaxFocusAreas = areas < 0 ? 0 : areas;
 
   rv = aCameraControl->Get(CAMERA_PARAM_SUPPORTED_MAXMETERINGAREAS, areas);
   LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_MAXMETERINGAREAS);
   mMaxMeteringAreas = areas < 0 ? 0 : areas;
 
+  int32_t faces;
+  rv = aCameraControl->Get(CAMERA_PARAM_SUPPORTED_MAXDETECTEDFACES, faces);
+  LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_MAXDETECTEDFACES);
+  mMaxDetectedFaces = faces < 0 ? 0 : faces;
+
   rv = aCameraControl->Get(CAMERA_PARAM_SUPPORTED_MINEXPOSURECOMPENSATION, mMinExposureCompensation);
   LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_MINEXPOSURECOMPENSATION);
 
   rv = aCameraControl->Get(CAMERA_PARAM_SUPPORTED_MAXEXPOSURECOMPENSATION, mMaxExposureCompensation);
   LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_MAXEXPOSURECOMPENSATION);
 
   rv = aCameraControl->Get(CAMERA_PARAM_SUPPORTED_EXPOSURECOMPENSATIONSTEP, mExposureCompensationStep);
   LOG_IF_ERROR(rv, CAMERA_PARAM_SUPPORTED_EXPOSURECOMPENSATIONSTEP);
@@ -252,16 +257,22 @@ CameraCapabilities::MaxFocusAreas() cons
 }
 
 uint32_t
 CameraCapabilities::MaxMeteringAreas() const
 {
   return mMaxMeteringAreas;
 }
 
+uint32_t
+CameraCapabilities::MaxDetectedFaces() const
+{
+  return mMaxDetectedFaces;
+}
+
 double
 CameraCapabilities::MinExposureCompensation() const
 {
   return mMinExposureCompensation;
 }
 
 double
 CameraCapabilities::MaxExposureCompensation() const
--- a/dom/camera/DOMCameraCapabilities.h
+++ b/dom/camera/DOMCameraCapabilities.h
@@ -50,16 +50,17 @@ public:
   void GetWhiteBalanceModes(nsTArray<nsString>& aRetVal) const;
   void GetSceneModes(nsTArray<nsString>& aRetVal) const;
   void GetEffects(nsTArray<nsString>& aRetVal) const;
   void GetFlashModes(nsTArray<nsString>& aRetVal) const;
   void GetFocusModes(nsTArray<nsString>& aRetVal) const;
   void GetZoomRatios(nsTArray<double>& aRetVal) const;
   uint32_t MaxFocusAreas() const;
   uint32_t MaxMeteringAreas() const;
+  uint32_t MaxDetectedFaces() const;
   double MinExposureCompensation() const;
   double MaxExposureCompensation() const;
   double ExposureCompensationStep() const;
   JS::Value RecorderProfiles(JSContext* cx) const;
   void GetIsoModes(nsTArray<nsString>& aRetVal) const;
 
 protected:
   nsresult TranslateToDictionary(ICameraControl* aCameraControl,
@@ -77,16 +78,17 @@ protected:
   nsTArray<nsString> mFlashModes;
   nsTArray<nsString> mFocusModes;
   nsTArray<nsString> mIsoModes;
 
   nsTArray<double> mZoomRatios;
 
   uint32_t mMaxFocusAreas;
   uint32_t mMaxMeteringAreas;
+  uint32_t mMaxDetectedFaces;
 
   double mMinExposureCompensation;
   double mMaxExposureCompensation;
   double mExposureCompensationStep;
 
   nsRefPtr<RecorderProfileManager> mRecorderProfileManager;
   JS::Heap<JS::Value> mRecorderProfiles;
 
--- a/dom/camera/DOMCameraControl.cpp
+++ b/dom/camera/DOMCameraControl.cpp
@@ -24,31 +24,32 @@
 #include "DOMCameraManager.h"
 #include "DOMCameraCapabilities.h"
 #include "CameraCommon.h"
 #include "nsGlobalWindow.h"
 #include "CameraPreviewMediaStream.h"
 #include "mozilla/dom/CameraControlBinding.h"
 #include "mozilla/dom/CameraManagerBinding.h"
 #include "mozilla/dom/CameraCapabilitiesBinding.h"
+#include "DOMCameraDetectedFace.h"
 #include "mozilla/dom/BindingUtils.h"
 
 using namespace mozilla;
 using namespace mozilla::dom;
 using namespace mozilla::ipc;
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(nsDOMCameraControl)
   NS_INTERFACE_MAP_ENTRY(nsISupports)
   NS_INTERFACE_MAP_ENTRY(nsIDOMMediaStream)
 NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
 
 NS_IMPL_ADDREF_INHERITED(nsDOMCameraControl, DOMMediaStream)
 NS_IMPL_RELEASE_INHERITED(nsDOMCameraControl, DOMMediaStream)
 
-NS_IMPL_CYCLE_COLLECTION_INHERITED_19(nsDOMCameraControl, DOMMediaStream,
+NS_IMPL_CYCLE_COLLECTION_INHERITED_20(nsDOMCameraControl, DOMMediaStream,
                                       mCapabilities,
                                       mWindow,
                                       mGetCameraOnSuccessCb,
                                       mGetCameraOnErrorCb,
                                       mAutoFocusOnSuccessCb,
                                       mAutoFocusOnErrorCb,
                                       mTakePictureOnSuccessCb,
                                       mTakePictureOnErrorCb,
@@ -57,17 +58,18 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED_19(ns
                                       mReleaseOnSuccessCb,
                                       mReleaseOnErrorCb,
                                       mSetConfigurationOnSuccessCb,
                                       mSetConfigurationOnErrorCb,
                                       mOnShutterCb,
                                       mOnClosedCb,
                                       mOnRecorderStateChangeCb,
                                       mOnPreviewStateChangeCb,
-                                      mOnAutoFocusMovingCb)
+                                      mOnAutoFocusMovingCb,
+                                      mOnFacesDetectedCb)
 
 class mozilla::StartRecordingHelper : public nsIDOMEventListener
 {
 public:
   NS_DECL_ISUPPORTS
   NS_DECL_NSIDOMEVENTLISTENER
 
   StartRecordingHelper(nsDOMCameraControl* aDOMCameraControl)
@@ -139,16 +141,17 @@ nsDOMCameraControl::nsDOMCameraControl(u
   , mReleaseOnErrorCb(nullptr)
   , mSetConfigurationOnSuccessCb(nullptr)
   , mSetConfigurationOnErrorCb(nullptr)
   , mOnShutterCb(nullptr)
   , mOnClosedCb(nullptr)
   , mOnRecorderStateChangeCb(nullptr)
   , mOnPreviewStateChangeCb(nullptr)
   , mOnAutoFocusMovingCb(nullptr)
+  , mOnFacesDetectedCb(nullptr)
   , mWindow(aWindow)
 {
   DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
   mInput = new CameraPreviewMediaStream(this);
 
   SetIsDOMBinding();
 
   nsRefPtr<DOMCameraConfiguration> initialConfig =
@@ -607,81 +610,84 @@ nsDOMCameraControl::SensorAngle()
 {
   MOZ_ASSERT(mCameraControl);
 
   int32_t angle = 0;
   mCameraControl->Get(CAMERA_PARAM_SENSORANGLE, angle);
   return angle;
 }
 
-already_AddRefed<CameraShutterCallback>
+// Callback attributes
+
+CameraShutterCallback*
 nsDOMCameraControl::GetOnShutter()
 {
-  nsRefPtr<CameraShutterCallback> cb = mOnShutterCb;
-  return cb.forget();
+  return mOnShutterCb;
 }
-
 void
 nsDOMCameraControl::SetOnShutter(CameraShutterCallback* aCb)
 {
   mOnShutterCb = aCb;
 }
 
-/* attribute CameraClosedCallback onClosed; */
-already_AddRefed<CameraClosedCallback>
+CameraClosedCallback*
 nsDOMCameraControl::GetOnClosed()
 {
-  nsRefPtr<CameraClosedCallback> onClosed = mOnClosedCb;
-  return onClosed.forget();
+  return mOnClosedCb;
 }
-
 void
 nsDOMCameraControl::SetOnClosed(CameraClosedCallback* aCb)
 {
   mOnClosedCb = aCb;
 }
 
-already_AddRefed<CameraRecorderStateChange>
+CameraRecorderStateChange*
 nsDOMCameraControl::GetOnRecorderStateChange()
 {
-  nsRefPtr<CameraRecorderStateChange> cb = mOnRecorderStateChangeCb;
-  return cb.forget();
+  return mOnRecorderStateChangeCb;
 }
-
 void
 nsDOMCameraControl::SetOnRecorderStateChange(CameraRecorderStateChange* aCb)
 {
   mOnRecorderStateChangeCb = aCb;
 }
 
-/* attribute CameraPreviewStateChange onPreviewStateChange; */
-already_AddRefed<CameraPreviewStateChange>
+CameraPreviewStateChange*
 nsDOMCameraControl::GetOnPreviewStateChange()
 {
-  nsRefPtr<CameraPreviewStateChange> cb = mOnPreviewStateChangeCb;
-  return cb.forget();
+  return mOnPreviewStateChangeCb;
 }
 void
 nsDOMCameraControl::SetOnPreviewStateChange(CameraPreviewStateChange* aCb)
 {
   mOnPreviewStateChangeCb = aCb;
 }
 
-already_AddRefed<CameraAutoFocusMovingCallback>
+CameraAutoFocusMovingCallback*
 nsDOMCameraControl::GetOnAutoFocusMoving()
 {
-  nsRefPtr<CameraAutoFocusMovingCallback> cb = mOnAutoFocusMovingCb;
-  return cb.forget();
+  return mOnAutoFocusMovingCb;
 }
 void
 nsDOMCameraControl::SetOnAutoFocusMoving(CameraAutoFocusMovingCallback* aCb)
 {
   mOnAutoFocusMovingCb = aCb;
 }
 
+CameraFaceDetectionCallback*
+nsDOMCameraControl::GetOnFacesDetected()
+{
+  return mOnFacesDetectedCb;
+}
+void
+nsDOMCameraControl::SetOnFacesDetected(CameraFaceDetectionCallback* aCb)
+{
+  mOnFacesDetectedCb = aCb;
+}
+
 already_AddRefed<dom::CameraCapabilities>
 nsDOMCameraControl::Capabilities()
 {
   nsRefPtr<CameraCapabilities> caps = mCapabilities;
 
   if (!caps) {
     caps = new CameraCapabilities(mWindow);
     nsresult rv = caps->Populate(mCameraControl);
@@ -854,16 +860,30 @@ nsDOMCameraControl::AutoFocus(CameraAuto
   if (aOnError.WasPassed()) {
     mAutoFocusOnErrorCb = &aOnError.Value();
   }
 
   aRv = mCameraControl->AutoFocus(cancel);
 }
 
 void
+nsDOMCameraControl::StartFaceDetection(ErrorResult& aRv)
+{
+  MOZ_ASSERT(mCameraControl);
+  aRv = mCameraControl->StartFaceDetection();
+}
+
+void
+nsDOMCameraControl::StopFaceDetection(ErrorResult& aRv)
+{
+  MOZ_ASSERT(mCameraControl);
+  aRv = mCameraControl->StopFaceDetection();
+}
+
+void
 nsDOMCameraControl::TakePicture(const CameraPictureOptions& aOptions,
                                 CameraTakePictureCallback& aOnSuccess,
                                 const Optional<OwningNonNull<CameraErrorCallback> >& aOnError,
                                 ErrorResult& aRv)
 {
   MOZ_ASSERT(mCameraControl);
 
   nsRefPtr<CameraTakePictureCallback> cb = mTakePictureOnSuccessCb;
@@ -952,16 +972,17 @@ nsDOMCameraControl::Shutdown()
   mSetConfigurationOnErrorCb = nullptr;
 
   // Remove all of the unsolicited event handlers too.
   mOnShutterCb = nullptr;
   mOnClosedCb = nullptr;
   mOnRecorderStateChangeCb = nullptr;
   mOnPreviewStateChangeCb = nullptr;
   mOnAutoFocusMovingCb = nullptr;
+  mOnFacesDetectedCb = nullptr;
 
   mCameraControl->Shutdown();
 }
 
 nsresult
 nsDOMCameraControl::NotifyRecordingStatusChange(const nsString& aMsg)
 {
   NS_ENSURE_TRUE(mWindow, NS_ERROR_FAILURE);
@@ -1161,16 +1182,42 @@ nsDOMCameraControl::OnAutoFocusMoving(bo
   nsRefPtr<CameraAutoFocusMovingCallback> cb = mOnAutoFocusMovingCb;
   if (cb) {
     ErrorResult ignored;
     cb->Call(aIsMoving, ignored);
   }
 }
 
 void
+nsDOMCameraControl::OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces)
+{
+  DOM_CAMERA_LOGI("DOM OnFacesDetected %u face(s)\n", aFaces.Length());
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsRefPtr<CameraFaceDetectionCallback> cb = mOnFacesDetectedCb;
+  if (!cb) {
+    return;
+  }
+
+  Sequence<OwningNonNull<DOMCameraDetectedFace> > faces;
+  uint32_t len = aFaces.Length();
+
+  if (faces.SetCapacity(len)) {
+    nsRefPtr<DOMCameraDetectedFace> f;
+    for (uint32_t i = 0; i < len; ++i) {
+      f = new DOMCameraDetectedFace(this, aFaces[i]);
+      *faces.AppendElement() = f.forget().take();
+    }
+  }
+
+  ErrorResult ignored;
+  cb->Call(faces, ignored);
+}
+
+void
 nsDOMCameraControl::OnTakePictureComplete(nsIDOMBlob* aPicture)
 {
   MOZ_ASSERT(NS_IsMainThread());
 
   nsRefPtr<CameraTakePictureCallback> cb = mTakePictureOnSuccessCb.forget();
   mTakePictureOnErrorCb = nullptr;
   if (!cb) {
     // Warn because it shouldn't be possible to get here without
--- a/dom/camera/DOMCameraControl.h
+++ b/dom/camera/DOMCameraControl.h
@@ -78,35 +78,39 @@ public:
   void SetExposureCompensation(const dom::Optional<double>& aCompensation, ErrorResult& aRv);
   double GetExposureCompensation(ErrorResult& aRv);
   int32_t SensorAngle();
   already_AddRefed<dom::CameraCapabilities> Capabilities();
   void GetIsoMode(nsString& aMode, ErrorResult& aRv);
   void SetIsoMode(const nsAString& aMode, ErrorResult& aRv);
 
   // Unsolicited event handlers.
-  already_AddRefed<dom::CameraShutterCallback> GetOnShutter();
+  dom::CameraShutterCallback* GetOnShutter();
   void SetOnShutter(dom::CameraShutterCallback* aCb);
-  already_AddRefed<dom::CameraClosedCallback> GetOnClosed();
+  dom::CameraClosedCallback* GetOnClosed();
   void SetOnClosed(dom::CameraClosedCallback* aCb);
-  already_AddRefed<dom::CameraRecorderStateChange> GetOnRecorderStateChange();
+  dom::CameraRecorderStateChange* GetOnRecorderStateChange();
   void SetOnRecorderStateChange(dom::CameraRecorderStateChange* aCb);
-  already_AddRefed<dom::CameraPreviewStateChange> GetOnPreviewStateChange();
+  dom::CameraPreviewStateChange* GetOnPreviewStateChange();
   void SetOnPreviewStateChange(dom::CameraPreviewStateChange* aCb);
-  already_AddRefed<dom::CameraAutoFocusMovingCallback> GetOnAutoFocusMoving();
+  dom::CameraAutoFocusMovingCallback* GetOnAutoFocusMoving();
   void SetOnAutoFocusMoving(dom::CameraAutoFocusMovingCallback* aCb);
+  dom::CameraFaceDetectionCallback* GetOnFacesDetected();
+  void SetOnFacesDetected(dom::CameraFaceDetectionCallback* aCb);
 
   // Methods.
   void SetConfiguration(const dom::CameraConfiguration& aConfiguration,
                         const dom::Optional<dom::OwningNonNull<dom::CameraSetConfigurationCallback> >& aOnSuccess,
                         const dom::Optional<dom::OwningNonNull<dom::CameraErrorCallback> >& aOnError,
                         ErrorResult& aRv);
   void AutoFocus(dom::CameraAutoFocusCallback& aOnSuccess,
                  const dom::Optional<dom::OwningNonNull<dom::CameraErrorCallback> >& aOnError,
                  ErrorResult& aRv);
+  void StartFaceDetection(ErrorResult& aRv);
+  void StopFaceDetection(ErrorResult& aRv);
   void TakePicture(const dom::CameraPictureOptions& aOptions,
                    dom::CameraTakePictureCallback& aOnSuccess,
                    const dom::Optional<dom::OwningNonNull<dom::CameraErrorCallback> >& aOnError,
                    ErrorResult& aRv);
   void StartRecording(const dom::CameraStartRecordingOptions& aOptions,
                       nsDOMDeviceStorage& storageArea,
                       const nsAString& filename,
                       dom::CameraStartRecordingCallback& aOnSuccess,
@@ -143,16 +147,17 @@ protected:
   friend class DOMCameraControlListener;
   friend class mozilla::StartRecordingHelper;
 
   void OnCreatedFileDescriptor(bool aSucceeded);
 
   void OnAutoFocusComplete(bool aAutoFocusSucceeded);
   void OnAutoFocusMoving(bool aIsMoving);
   void OnTakePictureComplete(nsIDOMBlob* aPicture);
+  void OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces);
 
   void OnHardwareStateChange(DOMCameraControlListener::HardwareState aState);
   void OnPreviewStateChange(DOMCameraControlListener::PreviewState aState);
   void OnRecorderStateChange(CameraControlListener::RecorderState aState, int32_t aStatus, int32_t aTrackNum);
   void OnConfigurationChange(DOMCameraConfiguration* aConfiguration);
   void OnShutter();
   void OnError(CameraControlListener::CameraErrorContext aContext, const nsAString& mError);
 
@@ -186,16 +191,17 @@ protected:
   nsRefPtr<dom::CameraErrorCallback>            mSetConfigurationOnErrorCb;
 
   // unsolicited event handlers
   nsRefPtr<dom::CameraShutterCallback>          mOnShutterCb;
   nsRefPtr<dom::CameraClosedCallback>           mOnClosedCb;
   nsRefPtr<dom::CameraRecorderStateChange>      mOnRecorderStateChangeCb;
   nsRefPtr<dom::CameraPreviewStateChange>       mOnPreviewStateChangeCb;
   nsRefPtr<dom::CameraAutoFocusMovingCallback>  mOnAutoFocusMovingCb;
+  nsRefPtr<dom::CameraFaceDetectionCallback>    mOnFacesDetectedCb;
 
   // Camera event listener; we only need this weak reference so that
   //  we can remove the listener from the camera when we're done
   //  with it.
   DOMCameraControlListener* mListener;
 
   // our viewfinder stream
   CameraPreviewMediaStream* mInput;
--- a/dom/camera/DOMCameraControlListener.cpp
+++ b/dom/camera/DOMCameraControlListener.cpp
@@ -238,16 +238,41 @@ DOMCameraControlListener::OnAutoFocusMov
   protected:
     bool mIsMoving;
   };
 
   NS_DispatchToMainThread(new Callback(mDOMCameraControl, aIsMoving));
 }
 
 void
+DOMCameraControlListener::OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces)
+{
+  class Callback : public DOMCallback
+  {
+  public:
+    Callback(nsMainThreadPtrHandle<nsDOMCameraControl> aDOMCameraControl,
+             const nsTArray<ICameraControl::Face>& aFaces)
+      : DOMCallback(aDOMCameraControl)
+      , mFaces(aFaces)
+    { }
+
+    void
+    RunCallback(nsDOMCameraControl* aDOMCameraControl) MOZ_OVERRIDE
+    {
+      aDOMCameraControl->OnFacesDetected(mFaces);
+    }
+
+  protected:
+    const nsTArray<ICameraControl::Face> mFaces;
+  };
+
+  NS_DispatchToMainThread(new Callback(mDOMCameraControl, aFaces));
+}
+
+void
 DOMCameraControlListener::OnShutter()
 {
   class Callback : public DOMCallback
   {
   public:
     Callback(nsMainThreadPtrHandle<nsDOMCameraControl> aDOMCameraControl)
       : DOMCallback(aDOMCameraControl)
     { }
--- a/dom/camera/DOMCameraControlListener.h
+++ b/dom/camera/DOMCameraControlListener.h
@@ -15,16 +15,17 @@ class CameraPreviewMediaStream;
 
 class DOMCameraControlListener : public CameraControlListener
 {
 public:
   DOMCameraControlListener(nsDOMCameraControl* aDOMCameraControl, CameraPreviewMediaStream* aStream);
 
   virtual void OnAutoFocusComplete(bool aAutoFocusSucceeded) MOZ_OVERRIDE;
   virtual void OnAutoFocusMoving(bool aIsMoving) MOZ_OVERRIDE;
+  virtual void OnFacesDetected(const nsTArray<ICameraControl::Face>& aFaces) MOZ_OVERRIDE;
   virtual void OnTakePictureComplete(uint8_t* aData, uint32_t aLength, const nsAString& aMimeType) MOZ_OVERRIDE;
 
   virtual void OnHardwareStateChange(HardwareState aState) MOZ_OVERRIDE;
   virtual void OnPreviewStateChange(PreviewState aState) MOZ_OVERRIDE;
   virtual void OnRecorderStateChange(RecorderState aState, int32_t aStatus, int32_t aTrackNum) MOZ_OVERRIDE;
   virtual void OnConfigurationChange(const CameraListenerConfiguration& aConfiguration) MOZ_OVERRIDE;
   virtual void OnShutter() MOZ_OVERRIDE;
   virtual bool OnNewPreviewFrame(layers::Image* aImage, uint32_t aWidth, uint32_t aHeight) MOZ_OVERRIDE;
new file mode 100644
--- /dev/null
+++ b/dom/camera/DOMCameraDetectedFace.cpp
@@ -0,0 +1,64 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "DOMCameraDetectedFace.h"
+
+using namespace mozilla;
+using namespace mozilla::dom;
+
+NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_1(DOMCameraPoint, mParent)
+
+NS_IMPL_CYCLE_COLLECTING_ADDREF(DOMCameraPoint)
+NS_IMPL_CYCLE_COLLECTING_RELEASE(DOMCameraPoint)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMCameraPoint)
+  NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
+  NS_INTERFACE_MAP_ENTRY(nsISupports)
+NS_INTERFACE_MAP_END
+
+NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_5(DOMCameraDetectedFace, mParent,
+                                        mBounds, mLeftEye, mRightEye, mMouth)
+
+NS_IMPL_CYCLE_COLLECTING_ADDREF(DOMCameraDetectedFace)
+NS_IMPL_CYCLE_COLLECTING_RELEASE(DOMCameraDetectedFace)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMCameraDetectedFace)
+  NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
+  NS_INTERFACE_MAP_ENTRY(nsISupports)
+NS_INTERFACE_MAP_END
+
+JSObject*
+DOMCameraPoint::WrapObject(JSContext* aCx)
+{
+  return CameraPointBinding::Wrap(aCx, this);
+}
+
+JSObject*
+DOMCameraDetectedFace::WrapObject(JSContext* aCx)
+{
+  return CameraDetectedFaceBinding::Wrap(aCx, this);
+}
+
+DOMCameraDetectedFace::DOMCameraDetectedFace(nsISupports* aParent,
+                                             const ICameraControl::Face& aFace)
+  : mParent(aParent)
+  , mId(aFace.id)
+  , mScore(aFace.score)
+  , mBounds(new DOMRect(MOZ_THIS_IN_INITIALIZER_LIST()))
+{
+  mBounds->SetRect(aFace.bound.left,
+                   aFace.bound.top,
+                   aFace.bound.right - aFace.bound.left,
+                   aFace.bound.bottom - aFace.bound.top);
+
+  if (aFace.hasLeftEye) {
+    mLeftEye = new DOMCameraPoint(this, aFace.leftEye);
+  }
+  if (aFace.hasRightEye) {
+    mRightEye = new DOMCameraPoint(this, aFace.rightEye);
+  }
+  if (aFace.hasMouth) {
+    mMouth = new DOMCameraPoint(this, aFace.mouth);
+  }
+
+  SetIsDOMBinding();
+}
new file mode 100644
--- /dev/null
+++ b/dom/camera/DOMCameraDetectedFace.h
@@ -0,0 +1,112 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_CAMERA_DOMCAMERADETECTEDFACE_H
+#define DOM_CAMERA_DOMCAMERADETECTEDFACE_H
+
+#include "mozilla/dom/CameraControlBinding.h"
+#include "nsCycleCollectionParticipant.h"
+#include "nsWrapperCache.h"
+#include "mozilla/dom/DOMRect.h"
+#include "ICameraControl.h"
+
+namespace mozilla {
+
+namespace dom {
+
+class DOMCameraPoint MOZ_FINAL : public nsISupports
+                               , public nsWrapperCache
+{
+public:
+  NS_DECL_CYCLE_COLLECTING_ISUPPORTS
+  NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(DOMCameraPoint)
+
+  DOMCameraPoint(nsISupports* aParent, const ICameraControl::Point& aPoint)
+    : mParent(aParent)
+    , mX(aPoint.x)
+    , mY(aPoint.y)
+  {
+    SetIsDOMBinding();
+  }
+
+  void
+  SetPoint(int32_t aX, int32_t aY)
+  {
+    mX = aX;
+    mY = aY;
+  }
+
+  int32_t X() { return mX; }
+  int32_t Y() { return mY; }
+
+  void SetX(int32_t aX) { mX = aX; }
+  void SetY(int32_t aY) { mY = aY; }
+
+  nsISupports*
+  GetParentObject() const
+  {
+    MOZ_ASSERT(mParent);
+    return mParent;
+  }
+
+  virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
+
+protected:
+  virtual ~DOMCameraPoint() { }
+
+  nsCOMPtr<nsISupports> mParent;
+  int32_t mX;
+  int32_t mY;
+};
+
+class DOMCameraDetectedFace MOZ_FINAL : public nsISupports
+                                      , public nsWrapperCache
+{
+public:
+  NS_DECL_CYCLE_COLLECTING_ISUPPORTS
+  NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(DOMCameraDetectedFace)
+
+  DOMCameraDetectedFace(nsISupports* aParent, const ICameraControl::Face& aFace);
+
+  uint32_t Id()       { return mId; }
+  uint32_t Score()    { return mScore; }
+  bool HasLeftEye()   { return mLeftEye; }
+  bool HasRightEye()  { return mRightEye; }
+  bool HasMouth()     { return mMouth; }
+
+  dom::DOMRect* Bounds()        { return mBounds; }
+
+  DOMCameraPoint* GetLeftEye()  { return mLeftEye; }
+  DOMCameraPoint* GetRightEye() { return mRightEye; }
+  DOMCameraPoint* GetMouth()    { return mMouth; }
+
+  nsISupports*
+  GetParentObject() const
+  {
+    MOZ_ASSERT(mParent);
+    return mParent;
+  }
+
+  virtual JSObject* WrapObject(JSContext* aCx) MOZ_OVERRIDE;
+
+protected:
+  virtual ~DOMCameraDetectedFace() { }
+
+  nsCOMPtr<nsISupports> mParent;
+
+  uint32_t mId;
+  uint32_t mScore;
+
+  nsRefPtr<dom::DOMRect> mBounds;
+
+  nsRefPtr<DOMCameraPoint> mLeftEye;
+  nsRefPtr<DOMCameraPoint> mRightEye;
+  nsRefPtr<DOMCameraPoint> mMouth;
+};
+
+} // namespace dom
+
+} // namespace mozilla
+
+#endif // DOM_CAMERA_DOMCAMERADETECTEDFACE_H
--- a/dom/camera/FallbackCameraCapabilities.cpp
+++ b/dom/camera/FallbackCameraCapabilities.cpp
@@ -105,16 +105,23 @@ DOMCameraCapabilities::GetStepExposureCo
 
 /* [implicit_jscontext] readonly attribute long maxMeteringAreas; */
 NS_IMETHODIMP
 DOMCameraCapabilities::GetMaxMeteringAreas(JSContext* cx, int32_t* aMaxMeteringAreas)
 {
     return NS_ERROR_NOT_IMPLEMENTED;
 }
 
+/* [implicit_jscontext] readonly attribute long maxDetectedFaces; */
+NS_IMETHODIMP
+DOMCameraCapabilities::GetMaxDetectedFaces(JSContext* cx, int32_t* aMaxDetectedFaces)
+{
+  return NS_ERROR_NOT_IMPLEMENTED;
+}
+
 /* [implicit_jscontext] readonly attribute jsval zoomRatios; */
 NS_IMETHODIMP
 DOMCameraCapabilities::GetZoomRatios(JSContext* cx, JS::Value* aZoomRatios)
 {
     return NS_ERROR_NOT_IMPLEMENTED;
 }
 
 /* [implicit_jscontext] readonly attribute jsval videoSizes; */
--- a/dom/camera/FallbackCameraControl.cpp
+++ b/dom/camera/FallbackCameraControl.cpp
@@ -56,16 +56,18 @@ public:
   nsresult PullParameters() { return NS_ERROR_FAILURE; }
 
 protected:
   ~FallbackCameraControl();
 
   virtual nsresult StartPreviewImpl() { return NS_ERROR_FAILURE; }
   virtual nsresult StopPreviewImpl() { return NS_ERROR_FAILURE; }
   virtual nsresult AutoFocusImpl(bool aCancelExistingCall) { return NS_ERROR_FAILURE; }
+  virtual nsresult StartFaceDetectionImpl() { return NS_ERROR_FAILURE; }
+  virtual nsresult StopFaceDetectionImpl() { return NS_ERROR_FAILURE; }
   virtual nsresult TakePictureImpl() { return NS_ERROR_FAILURE; }
   virtual nsresult StartRecordingImpl(DeviceStorageFileDescriptor* aFileDescriptor,
                                       const StartRecordingOptions* aOptions = nullptr)
                                         { return NS_ERROR_FAILURE; }
   virtual nsresult StopRecordingImpl() { return NS_ERROR_FAILURE; }
   virtual nsresult PushParametersImpl() { return NS_ERROR_FAILURE; }
   virtual nsresult PullParametersImpl() { return NS_ERROR_FAILURE; }
   virtual already_AddRefed<RecorderProfileManager> GetRecorderProfileManagerImpl() { return nullptr; }
--- a/dom/camera/GonkCameraControl.cpp
+++ b/dom/camera/GonkCameraControl.cpp
@@ -550,16 +550,44 @@ nsGonkCameraControl::AutoFocusImpl(bool 
 
   if (mCameraHw->AutoFocus() != OK) {
     return NS_ERROR_FAILURE;
   }
   return NS_OK;
 }
 
 nsresult
+nsGonkCameraControl::StartFaceDetectionImpl()
+{
+  MOZ_ASSERT(NS_GetCurrentThread() == mCameraThread);
+  RETURN_IF_NO_CAMERA_HW();
+
+  DOM_CAMERA_LOGI("Starting face detection\n");
+
+  if (mCameraHw->StartFaceDetection() != OK) {
+    return NS_ERROR_FAILURE;
+  }
+  return NS_OK;
+}
+
+nsresult
+nsGonkCameraControl::StopFaceDetectionImpl()
+{
+  MOZ_ASSERT(NS_GetCurrentThread() == mCameraThread);
+  RETURN_IF_NO_CAMERA_HW();
+
+  DOM_CAMERA_LOGI("Stopping face detection\n");
+
+  if (mCameraHw->StopFaceDetection() != OK) {
+    return NS_ERROR_FAILURE;
+  }
+  return NS_OK;
+}
+
+nsresult
 nsGonkCameraControl::SetThumbnailSizeImpl(const Size& aSize)
 {
   MOZ_ASSERT(NS_GetCurrentThread() == mCameraThread);
 
   /**
    * We keep a copy of the specified size so that if the picture size
    * changes, we can choose a new thumbnail size close to what was asked for
    * last time.
@@ -967,16 +995,93 @@ nsGonkCameraControl::OnAutoFocusComplete
 
   /**
    * Because the callback needs to call PullParametersImpl(),
    * we need to dispatch this callback through the Camera Thread.
    */
   mCameraThread->Dispatch(new AutoFocusComplete(this, aSuccess), NS_DISPATCH_NORMAL);
 }
 
+bool
+FeatureDetected(int32_t feature[])
+{
+  /**
+   * For information on what constitutes a valid feature, see:
+   * http://androidxref.com/4.0.4/xref/system/core/include/system/camera.h#202
+   *
+   * Although the comments explicitly state that undetected features are
+   * indicated using the value -2000, we conservatively include anything
+   * outside the explicitly valid range of [-1000, 1000] as undetected
+   * as well.
+   */
+  const int32_t kLowerFeatureBound = -1000;
+  const int32_t kUpperFeatureBound = 1000;
+  return (feature[0] >= kLowerFeatureBound && feature[0] <= kUpperFeatureBound) ||
+         (feature[1] >= kLowerFeatureBound && feature[1] <= kUpperFeatureBound);
+}
+
+void
+nsGonkCameraControl::OnFacesDetected(camera_frame_metadata_t* aMetaData)
+{
+  NS_ENSURE_TRUE_VOID(aMetaData);
+
+  nsTArray<Face> faces;
+  uint32_t numFaces = aMetaData->number_of_faces;
+  DOM_CAMERA_LOGI("Camera detected %d face(s)", numFaces);
+
+  faces.SetCapacity(numFaces);
+
+  for (uint32_t i = 0; i < numFaces; ++i) {
+    Face* f = faces.AppendElement();
+
+    f->id = aMetaData->faces[i].id;
+    f->score = aMetaData->faces[i].score;
+    if (f->score > 100) {
+      f->score = 100;
+    }
+    f->bound.left = aMetaData->faces[i].rect[0];
+    f->bound.top = aMetaData->faces[i].rect[1];
+    f->bound.right = aMetaData->faces[i].rect[2];
+    f->bound.bottom = aMetaData->faces[i].rect[3];
+    DOM_CAMERA_LOGI("Camera face[%u] appended: id=%d, score=%d, bound=(%d, %d)-(%d, %d)\n",
+      i, f->id, f->score, f->bound.left, f->bound.top, f->bound.right, f->bound.bottom);
+
+    f->hasLeftEye = FeatureDetected(aMetaData->faces[i].left_eye);
+    if (f->hasLeftEye) {
+      f->leftEye.x = aMetaData->faces[i].left_eye[0];
+      f->leftEye.y = aMetaData->faces[i].left_eye[1];
+      DOM_CAMERA_LOGI("    Left eye detected at (%d, %d)\n",
+        f->leftEye.x, f->leftEye.y);
+    } else {
+      DOM_CAMERA_LOGI("    No left eye detected\n");
+    }
+
+    f->hasRightEye = FeatureDetected(aMetaData->faces[i].right_eye);
+    if (f->hasRightEye) {
+      f->rightEye.x = aMetaData->faces[i].right_eye[0];
+      f->rightEye.y = aMetaData->faces[i].right_eye[1];
+      DOM_CAMERA_LOGI("    Right eye detected at (%d, %d)\n",
+        f->rightEye.x, f->rightEye.y);
+    } else {
+      DOM_CAMERA_LOGI("    No right eye detected\n");
+    }
+
+    f->hasMouth = FeatureDetected(aMetaData->faces[i].mouth);
+    if (f->hasMouth) {
+      f->mouth.x = aMetaData->faces[i].mouth[0];
+      f->mouth.y = aMetaData->faces[i].mouth[1];
+      DOM_CAMERA_LOGI("    Mouth detected at (%d, %d)\n", f->mouth.x, f->mouth.y);
+    } else {
+      DOM_CAMERA_LOGI("    No mouth detected\n");
+    }
+  }
+
+  CameraControlImpl::OnFacesDetected(faces);
+}
+
 void
 nsGonkCameraControl::OnTakePictureComplete(uint8_t* aData, uint32_t aLength)
 {
   ReentrantMonitorAutoEnter mon(mReentrantMonitor);
 
   uint8_t* data = new uint8_t[aLength];
 
   memcpy(data, aData, aLength);
@@ -1472,16 +1577,22 @@ OnAutoFocusComplete(nsGonkCameraControl*
 
 void
 OnAutoFocusMoving(nsGonkCameraControl* gc, bool aIsMoving)
 {
   gc->OnAutoFocusMoving(aIsMoving);
 }
 
 void
+OnFacesDetected(nsGonkCameraControl* gc, camera_frame_metadata_t* aMetaData)
+{
+  gc->OnFacesDetected(aMetaData);
+}
+
+void
 OnNewPreviewFrame(nsGonkCameraControl* gc, layers::TextureClient* aBuffer)
 {
   gc->OnNewPreviewFrame(aBuffer);
 }
 
 void
 OnShutter(nsGonkCameraControl* gc)
 {
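
A small worked example (not part of the patch) of how FeatureDetected() above classifies the coordinate pairs reported by the HAL; the sample values match the ones used in TestGonkCameraHardware.cpp below:

int32_t noEye[2]  = { -2000, -2000 };  // camera.h sentinel for "not detected"
int32_t oddEye[2] = { 1001, -1001 };   // outside [-1000, 1000] on both axes
int32_t mouth[2]  = { 11, 12 };        // within the valid range

bool a = FeatureDetected(noEye);   // false
bool b = FeatureDetected(oddEye);  // false -- conservatively treated as undetected
bool c = FeatureDetected(mouth);   // true
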
--- a/dom/camera/GonkCameraControl.h
+++ b/dom/camera/GonkCameraControl.h
@@ -44,16 +44,17 @@ class GonkRecorderProfile;
 class GonkRecorderProfileManager;
 
 class nsGonkCameraControl : public CameraControlImpl
 {
 public:
   nsGonkCameraControl(uint32_t aCameraId);
 
   void OnAutoFocusComplete(bool aSuccess);
+  void OnFacesDetected(camera_frame_metadata_t* aMetaData);
   void OnTakePictureComplete(uint8_t* aData, uint32_t aLength);
   void OnTakePictureError();
   void OnNewPreviewFrame(layers::TextureClient* aBuffer);
   void OnRecorderEvent(int msg, int ext1, int ext2);
   void OnError(CameraControlListener::CameraErrorContext aWhere,
                CameraControlListener::CameraError aError);
 
   virtual nsresult Set(uint32_t aKey, const nsAString& aValue) MOZ_OVERRIDE;
@@ -78,16 +79,17 @@ public:
   nsresult PushParameters();
   nsresult PullParameters();
 
 protected:
   ~nsGonkCameraControl();
 
   using CameraControlImpl::OnNewPreviewFrame;
   using CameraControlImpl::OnAutoFocusComplete;
+  using CameraControlImpl::OnFacesDetected;
   using CameraControlImpl::OnTakePictureComplete;
   using CameraControlImpl::OnConfigurationChange;
   using CameraControlImpl::OnError;
 
   virtual void BeginBatchParameterSet() MOZ_OVERRIDE;
   virtual void EndBatchParameterSet() MOZ_OVERRIDE;
 
   virtual nsresult StartImpl(const Configuration* aInitialConfig = nullptr) MOZ_OVERRIDE;
@@ -99,16 +101,18 @@ protected:
   nsresult SetPictureConfiguration(const Configuration& aConfig);
   nsresult SetVideoConfiguration(const Configuration& aConfig);
 
   template<class T> nsresult SetAndPush(uint32_t aKey, const T& aValue);
 
   virtual nsresult StartPreviewImpl() MOZ_OVERRIDE;
   virtual nsresult StopPreviewImpl() MOZ_OVERRIDE;
   virtual nsresult AutoFocusImpl(bool aCancelExistingCall) MOZ_OVERRIDE;
+  virtual nsresult StartFaceDetectionImpl() MOZ_OVERRIDE;
+  virtual nsresult StopFaceDetectionImpl() MOZ_OVERRIDE;
   virtual nsresult TakePictureImpl() MOZ_OVERRIDE;
   virtual nsresult StartRecordingImpl(DeviceStorageFileDescriptor* aFileDescriptor,
                                       const StartRecordingOptions* aOptions = nullptr) MOZ_OVERRIDE;
   virtual nsresult StopRecordingImpl() MOZ_OVERRIDE;
   virtual nsresult PushParametersImpl() MOZ_OVERRIDE;
   virtual nsresult PullParametersImpl() MOZ_OVERRIDE;
   virtual already_AddRefed<RecorderProfileManager> GetRecorderProfileManagerImpl() MOZ_OVERRIDE;
   already_AddRefed<GonkRecorderProfileManager> GetGonkRecorderProfileManager();
@@ -161,16 +165,17 @@ private:
   nsGonkCameraControl& operator=(const nsGonkCameraControl&) MOZ_DELETE;
 };
 
 // camera driver callbacks
 void OnTakePictureComplete(nsGonkCameraControl* gc, uint8_t* aData, uint32_t aLength);
 void OnTakePictureError(nsGonkCameraControl* gc);
 void OnAutoFocusComplete(nsGonkCameraControl* gc, bool aSuccess);
 void OnAutoFocusMoving(nsGonkCameraControl* gc, bool aIsMoving);
+void OnFacesDetected(nsGonkCameraControl* gc, camera_frame_metadata_t* aMetaData);
 void OnNewPreviewFrame(nsGonkCameraControl* gc, layers::TextureClient* aBuffer);
 void OnShutter(nsGonkCameraControl* gc);
 void OnClosed(nsGonkCameraControl* gc);
 void OnError(nsGonkCameraControl* gc, CameraControlListener::CameraError aError,
              int32_t aArg1, int32_t aArg2);
 
 } // namespace mozilla
 
--- a/dom/camera/GonkCameraHwMgr.cpp
+++ b/dom/camera/GonkCameraHwMgr.cpp
@@ -74,16 +74,20 @@ GonkCameraHardware::postData(int32_t aMs
     case CAMERA_MSG_COMPRESSED_IMAGE:
       if (aDataPtr != nullptr) {
         OnTakePictureComplete(mTarget, static_cast<uint8_t*>(aDataPtr->pointer()), aDataPtr->size());
       } else {
         OnTakePictureError(mTarget);
       }
       break;
 
+    case CAMERA_MSG_PREVIEW_METADATA:
+      OnFacesDetected(mTarget, metadata);
+      break;
+
     default:
       DOM_CAMERA_LOGE("Unhandled data callback event %d\n", aMsgType);
       break;
   }
 }
 
 // Android notify callback
 void
@@ -291,16 +295,48 @@ GonkCameraHardware::AutoFocus()
 void
 GonkCameraHardware::CancelAutoFocus()
 {
   DOM_CAMERA_LOGI("%s\n", __func__);
   mCamera->cancelAutoFocus();
 }
 
 int
+GonkCameraHardware::StartFaceDetection()
+{
+  DOM_CAMERA_LOGI("%s\n", __func__);
+  int rv = INVALID_OPERATION;
+
+#if ANDROID_VERSION >= 15
+  rv = mCamera->sendCommand(CAMERA_CMD_START_FACE_DETECTION, CAMERA_FACE_DETECTION_HW, 0);
+#endif
+  if (rv != OK) {
+    DOM_CAMERA_LOGE("Start face detection failed with status %d", rv);
+  }
+
+  return rv;
+}
+
+int
+GonkCameraHardware::StopFaceDetection()
+{
+  DOM_CAMERA_LOGI("%s\n", __func__);
+  int rv = INVALID_OPERATION;
+
+#if ANDROID_VERSION >= 15
+  rv = mCamera->sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, 0, 0);
+#endif
+  if (rv != OK) {
+    DOM_CAMERA_LOGE("Stop face detection failed with status %d", rv);
+  }
+
+  return rv;
+}
+
+int
 GonkCameraHardware::TakePicture()
 {
   return mCamera->takePicture(CAMERA_MSG_SHUTTER | CAMERA_MSG_COMPRESSED_IMAGE);
 }
 
 void
 GonkCameraHardware::CancelTakePicture()
 {
--- a/dom/camera/GonkCameraHwMgr.h
+++ b/dom/camera/GonkCameraHwMgr.h
@@ -74,16 +74,18 @@ public:
   enum {
     RAW_SENSOR_ORIENTATION,
     OFFSET_SENSOR_ORIENTATION
   };
   virtual int      GetSensorOrientation(uint32_t aType = RAW_SENSOR_ORIENTATION);
 
   virtual int      AutoFocus();
   virtual void     CancelAutoFocus();
+  virtual int      StartFaceDetection();
+  virtual int      StopFaceDetection();
   virtual int      TakePicture();
   virtual void     CancelTakePicture();
   virtual int      StartPreview();
   virtual void     StopPreview();
   virtual int      PushParameters(const mozilla::GonkCameraParameters& aParams);
   virtual int      PushParameters(const CameraParameters& aParams);
   virtual nsresult PullParameters(mozilla::GonkCameraParameters& aParams);
   virtual void     PullParameters(CameraParameters& aParams);
--- a/dom/camera/GonkCameraParameters.cpp
+++ b/dom/camera/GonkCameraParameters.cpp
@@ -109,16 +109,18 @@ GonkCameraParameters::Parameters::GetTex
     case CAMERA_PARAM_SUPPORTED_MAXEXPOSURECOMPENSATION:
       return KEY_MAX_EXPOSURE_COMPENSATION;
     case CAMERA_PARAM_SUPPORTED_EXPOSURECOMPENSATIONSTEP:
       return KEY_EXPOSURE_COMPENSATION_STEP;
     case CAMERA_PARAM_SUPPORTED_ZOOM:
       return KEY_ZOOM_SUPPORTED;
     case CAMERA_PARAM_SUPPORTED_ZOOMRATIOS:
       return KEY_ZOOM_RATIOS;
+    case CAMERA_PARAM_SUPPORTED_MAXDETECTEDFACES:
+      return KEY_MAX_NUM_DETECTED_FACES_HW;
     case CAMERA_PARAM_SUPPORTED_JPEG_THUMBNAIL_SIZES:
       return KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES;
     case CAMERA_PARAM_SUPPORTED_ISOMODES:
       // Not every platform defines KEY_SUPPORTED_ISO_MODES;
       // for those that don't, we use the raw string key.
       return "iso-values";
     default:
       DOM_CAMERA_LOGE("Unhandled camera parameter value %u\n", aKey);
@@ -540,35 +542,33 @@ GonkCameraParameters::SetTranslated(uint
         if (value <= mZoomRatios[0]) {
           index = 0;
         } else if (value >= mZoomRatios.LastElement()) {
           index = mZoomRatios.Length() - 1;
         } else {
           // mZoomRatios is sorted, so we can binary search it
           int bottom = 0;
           int top = mZoomRatios.Length() - 1;
-          int middle;
 
           while (top >= bottom) {
-            middle = (top + bottom) / 2;
-            if (value == mZoomRatios[middle]) {
+            index = (top + bottom) / 2;
+            if (value == mZoomRatios[index]) {
               // exact match
               break;
             }
-            if (value > mZoomRatios[middle] && value < mZoomRatios[middle + 1]) {
+            if (value > mZoomRatios[index] && value < mZoomRatios[index + 1]) {
               // the specified zoom value lies in this interval
               break;
             }
-            if (value > mZoomRatios[middle]) {
-              bottom = middle + 1;
+            if (value > mZoomRatios[index]) {
+              bottom = index + 1;
             } else {
-              top = middle - 1;
+              top = index - 1;
             }
           }
-          index = middle;
         }
         DOM_CAMERA_LOGI("Zoom = %fx --> index = %d\n", aValue, index);
       }
       return SetImpl(CAMERA_PARAM_ZOOM, index);
   }
 
   return SetImpl(aKey, aValue);
 }
--- a/dom/camera/ICameraControl.h
+++ b/dom/camera/ICameraControl.h
@@ -64,16 +64,17 @@ enum {
   CAMERA_PARAM_SUPPORTED_FOCUSMODES,
   CAMERA_PARAM_SUPPORTED_MAXFOCUSAREAS,
   CAMERA_PARAM_SUPPORTED_MAXMETERINGAREAS,
   CAMERA_PARAM_SUPPORTED_MINEXPOSURECOMPENSATION,
   CAMERA_PARAM_SUPPORTED_MAXEXPOSURECOMPENSATION,
   CAMERA_PARAM_SUPPORTED_EXPOSURECOMPENSATIONSTEP,
   CAMERA_PARAM_SUPPORTED_ZOOM,
   CAMERA_PARAM_SUPPORTED_ZOOMRATIOS,
+  CAMERA_PARAM_SUPPORTED_MAXDETECTEDFACES,
   CAMERA_PARAM_SUPPORTED_JPEG_THUMBNAIL_SIZES,
   CAMERA_PARAM_SUPPORTED_ISOMODES
 };
 
 class ICameraControl
 {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(ICameraControl)
@@ -97,33 +98,52 @@ public:
     int32_t   top;
     int32_t   left;
     int32_t   bottom;
     int32_t   right;
     uint32_t  weight;
   };
 
   struct Position {
-    double latitude;
-    double longitude;
-    double altitude;
-    double timestamp;
+    double    latitude;
+    double    longitude;
+    double    altitude;
+    double    timestamp;
   };
 
   struct StartRecordingOptions {
     uint32_t  rotation;
     uint32_t  maxFileSizeBytes;
     uint32_t  maxVideoLengthMs;
   };
 
   struct Configuration {
     Mode      mMode;
     Size      mPreviewSize;
     nsString  mRecorderProfile;
   };
+
+  struct Point
+  {
+    int32_t   x;
+    int32_t   y;
+  };
+
+  struct Face {
+    uint32_t  id;
+    uint32_t  score;
+    Region    bound;
+    bool      hasLeftEye;
+    Point     leftEye;
+    bool      hasRightEye;
+    Point     rightEye;
+    bool      hasMouth;
+    Point     mouth;
+  };
+
   static already_AddRefed<ICameraControl> Create(uint32_t aCameraId);
 
   virtual nsresult Start(const Configuration* aInitialConfig = nullptr) = 0;
   virtual nsresult Stop() = 0;
 
   virtual nsresult SetConfiguration(const Configuration& aConfig) = 0;
 
   virtual void AddListener(CameraControlListener* aListener) = 0;
@@ -131,16 +151,18 @@ public:
 
   virtual nsresult StartPreview() = 0;
   virtual nsresult StopPreview() = 0;
   virtual nsresult AutoFocus(bool aCancelExistingCall) = 0;
   virtual nsresult TakePicture() = 0;
   virtual nsresult StartRecording(DeviceStorageFileDescriptor *aFileDescriptor,
                                   const StartRecordingOptions* aOptions = nullptr) = 0;
   virtual nsresult StopRecording() = 0;
+  virtual nsresult StartFaceDetection() = 0;
+  virtual nsresult StopFaceDetection() = 0;
 
   virtual nsresult Set(uint32_t aKey, const nsAString& aValue) = 0;
   virtual nsresult Get(uint32_t aKey, nsAString& aValue) = 0;
   virtual nsresult Set(uint32_t aKey, double aValue) = 0;
   virtual nsresult Get(uint32_t aKey, double& aValue) = 0;
   virtual nsresult Set(uint32_t aKey, int32_t aValue) = 0;
   virtual nsresult Get(uint32_t aKey, int32_t& aValue) = 0;
   virtual nsresult Set(uint32_t aKey, int64_t aValue) = 0;
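
For illustration only (not part of the patch), populating the new Face structure the way a backend might before handing it to CameraControlImpl::OnFacesDetected(); all values are arbitrary and it assumes ICameraControl lives in the mozilla namespace:

mozilla::ICameraControl::Face face;
face.id = 1;
face.score = 75;                    // the Gonk layer clamps scores above 100
face.bound.left = 3;   face.bound.top = 4;
face.bound.right = 5;  face.bound.bottom = 6;
face.hasLeftEye = true;
face.leftEye.x = 7;    face.leftEye.y = 8;
face.hasRightEye = false;           // consumers must check the has* flags
face.hasMouth = false;

nsTArray<mozilla::ICameraControl::Face> faces;
faces.AppendElement(face);
// CameraControlImpl::OnFacesDetected(faces) then fans this out to listeners.
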
--- a/dom/camera/TestGonkCameraHardware.cpp
+++ b/dom/camera/TestGonkCameraHardware.cpp
@@ -22,26 +22,26 @@
 using namespace android;
 using namespace mozilla;
 
 TestGonkCameraHardware::TestGonkCameraHardware(nsGonkCameraControl* aTarget,
                                                uint32_t aCameraId,
                                                const sp<Camera>& aCamera)
   : GonkCameraHardware(aTarget, aCameraId, aCamera)
 {
-  DOM_CAMERA_LOGA("+===== Created TestGonkCameraHardware =====+\n");
+  DOM_CAMERA_LOGA("v===== Created TestGonkCameraHardware =====v\n");
   DOM_CAMERA_LOGT("%s:%d : this=%p (aTarget=%p)\n",
     __func__, __LINE__, this, aTarget);
   MOZ_COUNT_CTOR(TestGonkCameraHardware);
 }
 
 TestGonkCameraHardware::~TestGonkCameraHardware()
 {
   MOZ_COUNT_DTOR(TestGonkCameraHardware);
-  DOM_CAMERA_LOGA("+===== Destroyed TestGonkCameraHardware =====+\n");
+  DOM_CAMERA_LOGA("^===== Destroyed TestGonkCameraHardware =====^\n");
 }
 
 nsresult
 TestGonkCameraHardware::Init()
 {
   if (IsTestCase("init-failure")) {
     return NS_ERROR_FAILURE;
   }
@@ -133,16 +133,223 @@ TestGonkCameraHardware::AutoFocus()
     }
     DOM_CAMERA_LOGE("Failed to dispatch AutoFocusFailure runnable (0x%08x)\n", rv);
     return UNKNOWN_ERROR;
   }
 
   return GonkCameraHardware::AutoFocus();
 }
 
+// These classes have to be external to StartFaceDetection(), at least
+// until we pick up gcc 4.5, which supports local classes as template
+// arguments.
+class FaceDetected : public nsRunnable
+{
+public:
+  FaceDetected(nsGonkCameraControl* aTarget)
+    : mTarget(aTarget)
+  { }
+
+  ~FaceDetected()
+  {
+    ReleaseFacesArray();
+  }
+
+  NS_IMETHODIMP
+  Run()
+  {
+    InitMetaData();
+    OnFacesDetected(mTarget, &mMetaData);
+    return NS_OK;
+  }
+
+protected:
+  virtual nsresult InitMetaData() = 0;
+
+  nsresult
+  AllocateFacesArray(uint32_t num)
+  {
+    mMetaData.faces = new camera_face_t[num];
+    return NS_OK;
+  }
+
+  nsresult
+  ReleaseFacesArray()
+  {
+    delete [] mMetaData.faces;
+    mMetaData.faces = nullptr;
+    return NS_OK;
+  }
+
+  nsRefPtr<nsGonkCameraControl> mTarget;
+  camera_frame_metadata_t mMetaData;
+};
+
+class OneFaceDetected : public FaceDetected
+{
+public:
+  OneFaceDetected(nsGonkCameraControl* aTarget)
+    : FaceDetected(aTarget)
+  { }
+
+  nsresult
+  InitMetaData() MOZ_OVERRIDE
+  {
+    mMetaData.number_of_faces = 1;
+    AllocateFacesArray(1);
+    mMetaData.faces[0].id = 1;
+    mMetaData.faces[0].score = 2;
+    mMetaData.faces[0].rect[0] = 3;
+    mMetaData.faces[0].rect[1] = 4;
+    mMetaData.faces[0].rect[2] = 5;
+    mMetaData.faces[0].rect[3] = 6;
+    mMetaData.faces[0].left_eye[0] = 7;
+    mMetaData.faces[0].left_eye[1] = 8;
+    mMetaData.faces[0].right_eye[0] = 9;
+    mMetaData.faces[0].right_eye[1] = 10;
+    mMetaData.faces[0].mouth[0] = 11;
+    mMetaData.faces[0].mouth[1] = 12;
+
+    return NS_OK;
+  }
+};
+
+class TwoFacesDetected : public FaceDetected
+{
+public:
+  TwoFacesDetected(nsGonkCameraControl* aTarget)
+    : FaceDetected(aTarget)
+  { }
+
+  nsresult
+  InitMetaData() MOZ_OVERRIDE
+  {
+    mMetaData.number_of_faces = 2;
+    AllocateFacesArray(2);
+    mMetaData.faces[0].id = 1;
+    mMetaData.faces[0].score = 2;
+    mMetaData.faces[0].rect[0] = 3;
+    mMetaData.faces[0].rect[1] = 4;
+    mMetaData.faces[0].rect[2] = 5;
+    mMetaData.faces[0].rect[3] = 6;
+    mMetaData.faces[0].left_eye[0] = 7;
+    mMetaData.faces[0].left_eye[1] = 8;
+    mMetaData.faces[0].right_eye[0] = 9;
+    mMetaData.faces[0].right_eye[1] = 10;
+    mMetaData.faces[0].mouth[0] = 11;
+    mMetaData.faces[0].mouth[1] = 12;
+    mMetaData.faces[1].id = 13;
+    mMetaData.faces[1].score = 14;
+    mMetaData.faces[1].rect[0] = 15;
+    mMetaData.faces[1].rect[1] = 16;
+    mMetaData.faces[1].rect[2] = 17;
+    mMetaData.faces[1].rect[3] = 18;
+    mMetaData.faces[1].left_eye[0] = 19;
+    mMetaData.faces[1].left_eye[1] = 20;
+    mMetaData.faces[1].right_eye[0] = 21;
+    mMetaData.faces[1].right_eye[1] = 22;
+    mMetaData.faces[1].mouth[0] = 23;
+    mMetaData.faces[1].mouth[1] = 24;
+
+    return NS_OK;
+  }
+};
+
+class OneFaceNoFeaturesDetected : public FaceDetected
+{
+public:
+  OneFaceNoFeaturesDetected(nsGonkCameraControl* aTarget)
+    : FaceDetected(aTarget)
+  { }
+
+  nsresult
+  InitMetaData() MOZ_OVERRIDE
+  {
+    mMetaData.number_of_faces = 1;
+    AllocateFacesArray(1);
+    mMetaData.faces[0].id = 1;
+    // Test clamping 'score' to 100.
+    mMetaData.faces[0].score = 1000;
+    mMetaData.faces[0].rect[0] = 3;
+    mMetaData.faces[0].rect[1] = 4;
+    mMetaData.faces[0].rect[2] = 5;
+    mMetaData.faces[0].rect[3] = 6;
+    // Nullable values set to 'not-supported' specific values
+    mMetaData.faces[0].left_eye[0] = -2000;
+    mMetaData.faces[0].left_eye[1] = -2000;
+    // Test other 'not-supported' values as well. We treat
+    // anything outside the range [-1000, 1000] as invalid.
+    mMetaData.faces[0].right_eye[0] = 1001;
+    mMetaData.faces[0].right_eye[1] = -1001;
+    mMetaData.faces[0].mouth[0] = -2000;
+    mMetaData.faces[0].mouth[1] = 2000;
+
+    return NS_OK;
+  }
+};
+
+class NoFacesDetected : public FaceDetected
+{
+public:
+  NoFacesDetected(nsGonkCameraControl* aTarget)
+    : FaceDetected(aTarget)
+  { }
+
+  nsresult
+  InitMetaData() MOZ_OVERRIDE
+  {
+    mMetaData.number_of_faces = 0;
+    mMetaData.faces = nullptr;
+
+    return NS_OK;
+  }
+};
+
+int
+TestGonkCameraHardware::StartFaceDetection()
+{
+  nsRefPtr<FaceDetected> faceDetected;
+
+  if (IsTestCase("face-detection-detected-one-face")) {
+    faceDetected = new OneFaceDetected(mTarget);
+  } else if (IsTestCase("face-detection-detected-two-faces")) {
+    faceDetected = new TwoFacesDetected(mTarget);
+  } else if (IsTestCase("face-detection-detected-one-face-no-features")) {
+    faceDetected = new OneFaceNoFeaturesDetected(mTarget);
+  } else if (IsTestCase("face-detection-no-faces-detected")) {
+    faceDetected = new NoFacesDetected(mTarget);
+  }
+
+  if (!faceDetected) {
+    return GonkCameraHardware::StartFaceDetection();
+  }
+
+  nsresult rv = NS_DispatchToCurrentThread(faceDetected);
+  if (NS_FAILED(rv)) {
+    DOM_CAMERA_LOGE("Failed to dispatch FaceDetected runnable (0x%08x)\n", rv);
+    return UNKNOWN_ERROR;
+  }
+
+  return OK;
+}
+
+int
+TestGonkCameraHardware::StopFaceDetection()
+{
+  if (IsTestCase("face-detection-detected-one-face") ||
+      IsTestCase("face-detection-detected-two-faces") ||
+      IsTestCase("face-detection-detected-one-face-no-features") ||
+      IsTestCase("face-detection-no-faces-detected"))
+  {
+    return OK;
+  }
+
+  return GonkCameraHardware::StopFaceDetection();
+}
+
 int
 TestGonkCameraHardware::TakePicture()
 {
   class TakePictureFailure : public nsRunnable
   {
   public:
     TakePictureFailure(nsGonkCameraControl* aTarget)
       : mTarget(aTarget)
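The OneFaceNoFeaturesDetected case above hands the DOM layer a score of 1000 and feature coordinates outside [-1000, 1000]; the mochitest added later in this patch expects the score clamped to 100 and those features reported as null. A minimal JavaScript sketch of that expected normalization (normalizePoint/normalizeFace are hypothetical helpers, not part of this patch):

  // Hypothetical sketch of the conversion the DOM layer is expected to apply
  // to driver-supplied face metadata; not part of this patch.
  function normalizePoint(x, y) {
    // Anything outside [-1000, 1000] is treated as "not supported".
    if (x < -1000 || x > 1000 || y < -1000 || y > 1000) {
      return null;
    }
    return { x: x, y: y };
  }

  function normalizeFace(raw) {
    return {
      id: raw.id,
      score: Math.min(100, raw.score), // clamped to 100, per the test above
      bounds: { left: raw.rect[0], top: raw.rect[1],
                right: raw.rect[2], bottom: raw.rect[3] },
      leftEye: normalizePoint(raw.left_eye[0], raw.left_eye[1]),
      rightEye: normalizePoint(raw.right_eye[0], raw.right_eye[1]),
      mouth: normalizePoint(raw.mouth[0], raw.mouth[1])
    };
  }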
--- a/dom/camera/TestGonkCameraHardware.h
+++ b/dom/camera/TestGonkCameraHardware.h
@@ -20,16 +20,18 @@
 #include "GonkCameraHwMgr.h"
 
 namespace android {
 
 class TestGonkCameraHardware : public android::GonkCameraHardware
 {
 public:
   virtual int AutoFocus() MOZ_OVERRIDE;
+  virtual int StartFaceDetection() MOZ_OVERRIDE;
+  virtual int StopFaceDetection() MOZ_OVERRIDE;
   virtual int TakePicture() MOZ_OVERRIDE;
   virtual int StartPreview() MOZ_OVERRIDE;
   virtual int PushParameters(const mozilla::GonkCameraParameters& aParams) MOZ_OVERRIDE;
   virtual nsresult PullParameters(mozilla::GonkCameraParameters& aParams) MOZ_OVERRIDE;
   virtual int StartRecording() MOZ_OVERRIDE;
   virtual int StopRecording() MOZ_OVERRIDE;
   virtual int SetListener(const sp<GonkCameraListener>& aListener) MOZ_OVERRIDE;
   virtual int StoreMetaDataInBuffers(bool aEnabled) MOZ_OVERRIDE;
--- a/dom/camera/moz.build
+++ b/dom/camera/moz.build
@@ -16,16 +16,17 @@ EXPORTS += [
 
 SOURCES += [
     'CameraControlImpl.cpp',
     'CameraPreviewMediaStream.cpp',
     'CameraRecorderProfiles.cpp',
     'DOMCameraCapabilities.cpp',
     'DOMCameraControl.cpp',
     'DOMCameraControlListener.cpp',
+    'DOMCameraDetectedFace.cpp',
     'DOMCameraManager.cpp',
 ]
 
 if CONFIG['MOZ_B2G_CAMERA']:
     SOURCES += [
         'GonkCameraControl.cpp',
         'GonkCameraHwMgr.cpp',
         'GonkCameraManager.cpp',
--- a/dom/camera/test/mochitest.ini
+++ b/dom/camera/test/mochitest.ini
@@ -3,9 +3,10 @@ support-files = camera_common.js
 
 [test_camera.html]
 [test_camera_2.html]
 [test_camera_3.html]
 [test_camera_hardware_init_failure.html]
 [test_camera_hardware_failures.html]
 [test_bug975472.html]
 [test_camera_fake_parameters.html]
+[test_camera_hardware_face_detection.html]
 [test_camera_hardware_auto_focus_moving_cb.html]
new file mode 100644
--- /dev/null
+++ b/dom/camera/test/test_camera_hardware_face_detection.html
@@ -0,0 +1,320 @@
+<!DOCTYPE HTML>
+<html>
+<!--
+https://bugzilla.mozilla.org/show_bug.cgi?id=965420
+-->
+<head>
+  <title>Bug 965420 - Test camera hardware API for face detection</title>
+  <script type="text/javascript" src="/MochiKit/MochiKit.js"></script>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <script type="text/javascript" src="camera_common.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+  <a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=965420">Mozilla Bug 965420</a>
+  <video id="viewfinder" width = "200" height = "200" autoplay></video>
+  <img src="#" alt="This image is going to load" id="testimage"/>
+
+<script class="testbody" type="text/javascript;version=1.7">
+
+var whichCamera = navigator.mozCameras.getListOfCameras()[0];
+var initialConfig = {
+  mode: 'picture',
+  recorderProfile: 'cif',
+  previewSize: {
+    width: 352,
+    height: 288
+  }
+};
+
+const PREF_FACEDETECTION_ENABLED = "camera.control.face_detection.enabled";
+
+var cameraObj;
+var oldPref;
+
+// Shorthand functions
+function end() {
+  function reallyEnd() {
+    CameraTest.end();
+  }
+  // Restore the recorded value even if it was false; only clear when the
+  // pref did not exist before the test.
+  if (typeof oldPref !== "undefined") {
+    SpecialPowers.pushPrefEnv(
+      {'set': [[PREF_FACEDETECTION_ENABLED, oldPref]]}, reallyEnd);
+  } else {
+    SpecialPowers.pushPrefEnv(
+      {'clear': [[PREF_FACEDETECTION_ENABLED]]}, reallyEnd);
+  }
+}
+function next() {
+  CameraTest.next();
+}
+
+function compareFaces(aFaces, expected)
+{
+  ok(aFaces, "have detected faces object");
+  ok(aFaces.length == expected.faces.length,
+    "expected=" + expected.faces.length + ", got=" + aFaces.length);
+  // forEach() discards the callback's return value, so use every()
+  // to propagate a per-face failure to the caller.
+  return aFaces.length == expected.faces.length &&
+    aFaces.every(function (face, index) {
+      let result = compareFace(face, expected.faces[index]);
+      ok(result === "ok", "face check: " + result);
+      return result === "ok";
+    });
+}
+
+function compareFace(aFace, expected)
+{
+  if (aFace.id != expected.id) {
+    return "expected face.id=" + expected.id + ", got=" + aFace.id;
+  }
+  if (aFace.score != expected.score) {
+    return "expected face.score=" + expected.score + ", got=" + aFace.score;
+  }
+  if (!aFace.bounds) {
+    return "face.bounds is missing";
+  }
+  if (aFace.bounds.left != expected.bounds.left ||
+      aFace.bounds.top != expected.bounds.top ||
+      aFace.bounds.right != expected.bounds.right ||
+      aFace.bounds.bottom != expected.bounds.bottom) {
+    return "expected face.bounds=" + expected.bounds.toSource() +
+      ", got=({left:" + aFace.bounds.left + ", top:" + aFace.bounds.top + ", right:" + aFace.bounds.right + ", bottom:" + aFace.bounds.bottom + "})";
+  }
+
+  if (aFace.leftEye && !expected.leftEye) {
+    return "expected null face.leftEye, got=({x:" + aFace.leftEye.x + ", y:" + aFace.leftEye.y + "})";
+  }
+  if (!aFace.leftEye && expected.leftEye) {
+    return "expected face.leftEye=" + expected.leftEye.toSource() + ", got null leftEye";
+  }
+  if (aFace.leftEye && expected.leftEye &&
+      (aFace.leftEye.x != expected.leftEye.x || aFace.leftEye.y != expected.leftEye.y)) {
+    return "expected face.leftEye=" + expected.leftEye.toSource() +
+      ", got=({x:" + aFace.leftEye.x + ", y:" + aFace.leftEye.y + "})";
+  }
+
+  if (aFace.rightEye && !expected.rightEye) {
+    return "expected null face.rightEye, got=({x:" + aFace.rightEye.x + ", y:" + aFace.rightEye.y + "})";
+  }
+  if (!aFace.rightEye && expected.rightEye) {
+    return "expected face.rightEye=" + expected.rightEye.toSource() + ", got null rightEye";
+  }
+  if (aFace.rightEye && expected.rightEye &&
+      (aFace.rightEye.x != expected.rightEye.x || aFace.rightEye.y != expected.rightEye.y)) {
+    return "expected face.rightEye=" + expected.rightEye.toSource() +
+      ", got=({x:" + aFace.rightEye.x + ", y:" + aFace.rightEye.y + "})";
+  }
+
+  if (aFace.mouth && !expected.mouth) {
+    return "expected null face.mouth, got=({x:" + aFace.mouth.x + ", y:" + aFace.mouth.y + "})";
+  }
+  if (!aFace.mouth && expected.mouth) {
+    return "expected face.mouth=" + expected.mouth.toSource() + ", got null mouth";
+  }
+  if (aFace.mouth && expected.mouth &&
+      (aFace.mouth.x != expected.mouth.x || aFace.mouth.y != expected.mouth.y)) {
+    return "expected face.mouth=" + expected.mouth.toSource() +
+      ", got=({x:" + aFace.mouth.x + ", y:" + aFace.mouth.y + "})";
+  }
+
+  return "ok";
+}
+
+var tests = [
+  {
+    key: "face-detection-detected-one-face",
+    func: function testFaceDetectionFoundOneFace(camera) {
+      var expected = {
+        faces: [ {
+          id:       1,
+          score:    2,
+          bounds: {
+            left:   3,
+            top:    4,
+            right:  5,
+            bottom: 6
+          },
+          leftEye: {
+            x:      7,
+            y:      8
+          },
+          rightEye: {
+            x:      9,
+            y:      10
+          },
+          mouth: {
+            x:      11,
+            y:      12
+          }
+        } ]
+      };
+      camera.onFacesDetected = function(aFaces) {
+        ok(compareFaces(aFaces, expected),
+          "onFaceDetected received the detected faces correctly");
+        camera.stopFaceDetection();
+        next();
+      }
+      camera.startFaceDetection();
+    }
+  },
+  {
+    key: "face-detection-detected-two-faces",
+    func: function testFaceDetectionFoundTwoFaces(camera) {
+      var expected = {
+        faces: [ {
+          id:       1,
+          score:    2,
+          bounds: {
+            left:   3,
+            top:    4,
+            right:  5,
+            bottom: 6
+          },
+          leftEye: {
+            x:      7,
+            y:      8
+          },
+          rightEye: {
+            x:      9,
+            y:      10
+          },
+          mouth: {
+            x:      11,
+            y:      12
+          }
+        },
+        {
+          id:       13,
+          score:    14,
+          bounds: {
+            left:   15,
+            top:    16,
+            right:  17,
+            bottom: 18
+          },
+          leftEye: {
+            x:      19,
+            y:      20
+          },
+          rightEye: {
+            x:      21,
+            y:      22
+          },
+          mouth: {
+            x:      23,
+            y:      24
+          }
+        } ]
+      };
+      camera.onFacesDetected = function(aFaces) {
+        ok(compareFaces(aFaces, expected),
+          "onFaceDetected received the detected faces correctly");
+        camera.stopFaceDetection();
+        next();
+      }
+      camera.startFaceDetection();
+    }
+  },
+  {
+    key: "face-detection-detected-one-face-no-features",
+    func: function (camera) {
+      var expected = {
+        faces: [ {
+          id:       1,
+          score:    100,
+          bounds: {
+            left:   3,
+            top:    4,
+            right:  5,
+            bottom: 6
+          },
+          leftEye:  null,
+          rightEye: null,
+          mouth:    null
+        } ]
+      };
+      camera.onFacesDetected = function(aFaces) {
+        ok(compareFaces(aFaces, expected),
+          "onFaceDetected received the detected faces correctly");
+        camera.stopFaceDetection();
+        next();
+      }
+      camera.startFaceDetection();
+    }
+  },
+  {
+    key: "face-detection-no-faces-detected",
+    func: function (camera) {
+      var expected = {
+        faces: []
+      };
+      camera.onFacesDetected = function(aFaces) {
+        ok(compareFaces(aFaces, expected),
+          "onFaceDetected received the detected faces correctly");
+        camera.stopFaceDetection();
+        next();
+      }
+      camera.startFaceDetection();
+    }
+  },
+];
+
+var testGenerator = function() {
+  for (var i = 0; i < tests.length; ++i ) {
+    yield tests[i];
+  }
+}();
+
+window.addEventListener('beforeunload', function() {
+  document.getElementById('viewfinder').mozSrcObject = null;
+  if (cameraObj) {
+    cameraObj.release();
+    cameraObj = null;
+  }
+});
+
+// Must call CameraTest.begin() before any other async methods.
+CameraTest.begin("hardware", function(test) {
+  // If the pref doesn't exist, this get will fail; catch it and continue.
+  try {
+    oldPref = SpecialPowers.getBoolPref(PREF_FACEDETECTION_ENABLED);
+  } catch(e) { }
+
+  SpecialPowers.pushPrefEnv({'set': [[PREF_FACEDETECTION_ENABLED, true]]}, function() {
+    var enabled;
+    try {
+      enabled = SpecialPowers.getBoolPref(PREF_FACEDETECTION_ENABLED);
+    } catch(e) { }
+    ok(enabled, PREF_FACEDETECTION_ENABLED + " is " + enabled);
+
+    function onSuccess(camera, config) {
+      document.getElementById('viewfinder').mozSrcObject = camera;
+      cameraObj = camera;
+      CameraTest.next = function() {
+        try {
+          var t = testGenerator.next();
+          test.set(t.key, t.func.bind(undefined, camera));
+        } catch(e) {
+          if (e instanceof StopIteration) {
+            end();
+          } else {
+            throw e;
+          }
+        }
+      };
+      next();
+    }
+    function onError(error) {
+      ok(false, "getCamera() failed with: " + error);
+      end();
+    }
+    navigator.mozCameras.getCamera(whichCamera, initialConfig, onSuccess, onError);
+  })
+});
+
+</script>
+</body>
+
+</html>
--- a/dom/tests/mochitest/general/test_interfaces.html
+++ b/dom/tests/mochitest/general/test_interfaces.html
@@ -167,18 +167,22 @@ var interfaceNamesInGlobalScope =
     {name: "CallEvent", b2g: true, pref: "dom.telephony.enabled"},
 // IMPORTANT: Do not change this list without review from a DOM peer!
     {name: "CallGroupErrorEvent", b2g: true, pref: "dom.telephony.enabled"},
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "CameraCapabilities",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "CameraControl",
 // IMPORTANT: Do not change this list without review from a DOM peer!
+    {name: "CameraDetectedFace", pref: "camera.control.face_detection.enabled"},
+// IMPORTANT: Do not change this list without review from a DOM peer!
     "CameraManager",
 // IMPORTANT: Do not change this list without review from a DOM peer!
+    {name: "CameraPoint", pref: "camera.control.face_detection.enabled"},
+// IMPORTANT: Do not change this list without review from a DOM peer!
     "CanvasGradient",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "CanvasPattern",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "CanvasRenderingContext2D",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "CaretPosition",
 // IMPORTANT: Do not change this list without review from a DOM peer!
--- a/dom/webidl/CameraCapabilities.webidl
+++ b/dom/webidl/CameraCapabilities.webidl
@@ -19,16 +19,17 @@ interface CameraCapabilities
   [Constant, Cached] readonly attribute sequence<DOMString> effects;
   [Constant, Cached] readonly attribute sequence<DOMString> flashModes;
   [Constant, Cached] readonly attribute sequence<DOMString> focusModes;
 
   [Constant, Cached] readonly attribute sequence<double> zoomRatios;
 
   [Constant, Cached] readonly attribute unsigned long maxFocusAreas;
   [Constant, Cached] readonly attribute unsigned long maxMeteringAreas;
+  [Constant, Cached] readonly attribute unsigned long maxDetectedFaces;
 
   [Constant, Cached] readonly attribute double minExposureCompensation;
   [Constant, Cached] readonly attribute double maxExposureCompensation;
   [Constant, Cached] readonly attribute double exposureCompensationStep;
 
   [Constant, Cached] readonly attribute any recorderProfiles;
 
   [Constant, Cached] readonly attribute sequence<DOMString> isoModes;
--- a/dom/webidl/CameraControl.webidl
+++ b/dom/webidl/CameraControl.webidl
@@ -340,8 +340,105 @@ interface CameraControl : MediaStream
      argument or union argument containing a dictionary not followed by
      a required argument must be optional"
   */
   [Throws]
   void setConfiguration(optional CameraConfiguration configuration,
                         optional CameraSetConfigurationCallback onSuccess,
                         optional CameraErrorCallback onError);
 };
+
+/* The coordinates, relative to the camera sensor, of the center of a
+   detected facial feature. As with CameraRegions:
+     { x: -1000, y: -1000 } is the top-left corner
+     { x:  1000, y:  1000 } is the bottom-right corner
+   x and y can range from -1000 to 1000.
+*/
+[Pref="camera.control.face_detection.enabled"]
+interface CameraPoint
+{
+  attribute long x;
+  attribute long y;
+};
+
+/* Information about each face detected by a camera device, e.g.
+     {
+       id: 1,
+       score: 80,
+       bounds: { left:   -203,
+                 top:    -400,
+                 right:   300,
+                 bottom:  250 },
+       leftEye:  { x:  -100,
+                   y:  -200 },
+       rightEye: { x:   100,
+                   y:   100 },
+       mouth:    { x:   150,
+                   y:   150 } }
+
+   'id' is a unique value per face while the face is visible to the tracker.
+   If the face leaves the viewfinder and then returns, it will be assigned
+   a new value.
+
+   'score' is the confidence level for the detection of the face.
+   The range is 1 to 100, where 100 is the highest confidence.
+
+   'bounds' is the bounding rectangle of the face. It is guaranteed that
+   left < right and top < bottom. The coordinates can be smaller than -1000
+   or larger than 1000, but at least one vertex will lie within
+   (-1000, -1000) and (1000, 1000).
+
+   'leftEye' gives the coordinates of the center of the left eye, in the same
+   space as 'bounds'. This is an optional field and may not be supported on
+   all devices. If it is not supported or detected, the value will be set to
+   null.
+
+   'rightEye' gives the coordinates of the detected right eye; null if not
+   supported or detected.
+
+   'mouth' gives the coordinates of the detected mouth; null if not supported or
+   detected.
+*/
+[Pref="camera.control.face_detection.enabled"]
+interface CameraDetectedFace
+{
+  readonly attribute unsigned long id;
+
+  readonly attribute unsigned long score;
+
+  readonly attribute DOMRect bounds;
+
+  readonly attribute boolean hasLeftEye;
+  readonly attribute CameraPoint? leftEye;
+
+  readonly attribute boolean hasRightEye;
+  readonly attribute CameraPoint? rightEye;
+
+  readonly attribute boolean hasMouth;
+  readonly attribute CameraPoint? mouth;
+};
+
+callback CameraFaceDetectionCallback = void (sequence<CameraDetectedFace> faces);
+
+partial interface CameraControl
+{
+  /* Starts face detection. This should be called after the preview is
+     started. The camera will periodically invoke 'onFacesDetected' with a
+     sequence of zero or more faces detected in the preview frame.
+
+     How often the callback is invoked is implementation dependent.
+
+     This method throws an exception if face detection fails to start.
+  */
+  [Throws, Pref="camera.control.face_detection.enabled"]
+  void startFaceDetection();
+
+  /* Stops face detection.
+
+     This method throws an exception if face detection can't be stopped.
+  */
+  [Throws, Pref="camera.control.face_detection.enabled"]
+  void stopFaceDetection();
+
+  /* Callback for faces detected in the preview frame. If no faces are
+     detected, the callback is invoked with an empty sequence. */
+  [Pref="camera.control.face_detection.enabled"]
+  attribute CameraFaceDetectionCallback? onFacesDetected;
+};
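For reference, a minimal JavaScript sketch of how a page might consume this API once the "camera.control.face_detection.enabled" pref is set; the viewfinder element and the sensor-to-pixel mapping are illustrative assumptions, not part of the patch:

  // Assumes a CameraControl instance 'camera' obtained from
  // navigator.mozCameras.getCamera() and a <video id="viewfinder"> element
  // showing the preview stream.
  var viewfinder = document.getElementById('viewfinder');

  // Map a sensor-space coordinate in [-1000, 1000] to a pixel offset.
  function sensorToPixels(value, extent) {
    return (value + 1000) / 2000 * extent;
  }

  camera.onFacesDetected = function(faces) {
    faces.forEach(function(face) {
      var left = sensorToPixels(face.bounds.left, viewfinder.clientWidth);
      var top = sensorToPixels(face.bounds.top, viewfinder.clientHeight);
      console.log("face " + face.id + " (score " + face.score + ") near " +
                  left.toFixed(0) + "," + top.toFixed(0));
      if (face.leftEye) {
        console.log("  left eye at x=" + face.leftEye.x + ", y=" + face.leftEye.y);
      }
    });
  };

  try {
    camera.startFaceDetection(); // throws if face detection cannot start
  } catch(e) {
    console.error("face detection unavailable: " + e);
  }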
--- a/modules/libpref/src/init/all.js
+++ b/modules/libpref/src/init/all.js
@@ -4423,8 +4423,9 @@ pref("image.mozsamplesize.enabled", fals
 // Enable navigator.sendBeacon on all platforms except b2g because it doesn't
 // play nicely with Firefox OS apps yet.
 #ifndef MOZ_WIDGET_GONK
 pref("beacon.enabled", true);
 #endif
 
 // Camera prefs
 pref("camera.control.autofocus_moving_callback.enabled", false);
+pref("camera.control.face_detection.enabled", false);
--- a/xpcom/glue/nsCycleCollectionParticipant.h
+++ b/xpcom/glue/nsCycleCollectionParticipant.h
@@ -1504,11 +1504,60 @@ static NS_CYCLE_COLLECTION_INNERCLASS NS
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f14)                                       \
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f15)                                       \
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f16)                                       \
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f17)                                       \
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f18)                                       \
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f19)                                       \
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
+#define NS_IMPL_CYCLE_COLLECTION_INHERITED_20(_class, _base, _f1, _f2, _f3, _f4, _f5, \
+                                              _f6, _f7, _f8, _f9, _f10, _f11, _f12, _f13, _f14, \
+                                              _f15, _f16, _f17, _f18, _f19, _f20) \
+ NS_IMPL_CYCLE_COLLECTION_CLASS(_class)                                        \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(_class, _base)                \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f1)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f2)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f3)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f4)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f5)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f6)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f7)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f8)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f9)                                          \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f10)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f11)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f12)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f13)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f14)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f15)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f16)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f17)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f18)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f19)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(_f20)                                         \
+ NS_IMPL_CYCLE_COLLECTION_UNLINK_END                                           \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(_class, _base)              \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f1)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f2)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f3)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f4)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f5)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f6)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f7)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f8)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f9)                                        \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f10)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f11)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f12)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f13)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f14)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f15)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f16)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f17)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f18)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f19)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(_f20)                                       \
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
+
 #define NS_CYCLE_COLLECTION_NOTE_EDGE_NAME CycleCollectionNoteEdgeName
 
 #endif // nsCycleCollectionParticipant_h__