gfx/vr/gfxVRCardboard.cpp

/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include <math.h>

#include "prlink.h"
#include "prmem.h"
#include "prenv.h"
#include "gfxPrefs.h"
#include "nsString.h"
#include "mozilla/dom/ScreenOrientation.h"
#include "mozilla/Preferences.h"
#include "mozilla/Hal.h"

#include "gfxVRCardboard.h"

#include "nsServiceManagerUtils.h"
#include "nsIScreenManager.h"

#ifdef ANDROID
#include <android/log.h>
#define LOG(args...)  __android_log_print(ANDROID_LOG_INFO, "GeckoVR" , ## args)
#else
#define LOG(...) do { } while(0)
#endif

#ifndef M_PI
# define M_PI 3.14159265358979323846
#endif

// 1/sqrt(2) (aka sqrt(2)/2)
#ifndef M_SQRT1_2
# define M_SQRT1_2	0.70710678118654752440
#endif

using namespace mozilla::dom;
using namespace mozilla::gfx;
using namespace mozilla::gfx::impl;

namespace {
// some utility functions

// Remaps the axes of the given rotation matrix to a new configuration based on
// the screen orientation.  Similar to what Android's
// SensorManager.remapCoordinateSystem does, except it only handles the fixed
// set of transforms we need.
Matrix4x4
RemapMatrixForOrientation(ScreenOrientation screenConfig, const Matrix4x4& aMatrix)
{
  Matrix4x4 out;
  const float *in = &aMatrix._11;
  float *o = &out._11;
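
  // |out| starts as an identity matrix; only its upper-left 3x3 rotation
  // block is rewritten below, using row-major float pointers into both
  // matrices.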

  if (screenConfig == eScreenOrientation_LandscapePrimary) {
    // remap X,Y -> Y,-X
    o[0] = -in[1]; o[1] = in[0]; o[2] = in[2];
    o[4] = -in[5]; o[5] = in[4]; o[6] = in[6];
    o[8] = -in[9]; o[9] = in[8]; o[10] = in[10];
  } else if (screenConfig == eScreenOrientation_LandscapeSecondary) {
    // remap X,Y -> -Y,X
    o[0] = in[1]; o[1] = -in[0]; o[2] = in[2];
    o[4] = in[5]; o[5] = -in[4]; o[6] = in[6];
    o[8] = in[9]; o[9] = -in[8]; o[10] = in[10];
  } else if (screenConfig == eScreenOrientation_PortraitPrimary ||
             screenConfig == eScreenOrientation_PortraitSecondary)
  {
    // remap X,Y -> X,-Z
    o[0] = in[0]; o[1] = in[2]; o[2] = -in[1];
    o[4] = in[4]; o[5] = in[6]; o[6] = -in[5];
    o[8] = in[8]; o[9] = in[10]; o[10] = -in[9];
  } else {
    MOZ_ASSERT(0, "gfxVRCardboard::RemapMatrixForOrientation invalid screenConfig");
  }

  return out;
}

} // anonymous namespace

HMDInfoCardboard::HMDInfoCardboard()
  : VRHMDInfo(VRHMDType::Cardboard)
  , mStartCount(0)
  , mOrient(eScreenOrientation_PortraitPrimary)
{
  MOZ_ASSERT(sizeof(HMDInfoCardboard::DistortionVertex) == sizeof(VRDistortionVertex),
             "HMDInfoCardboard::DistortionVertex must match the size of VRDistortionVertex");

  MOZ_COUNT_CTOR_INHERITED(HMDInfoCardboard, VRHMDInfo);

  mDeviceName.AssignLiteral("Phone Sensor (Cardboard) HMD");

  mSupportedSensorBits = State_Orientation;

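  // We don't query any real lens parameters; just use a symmetric 45-degree
  // field of view per eye as both the recommended and maximum FOV.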
  mRecommendedEyeFOV[Eye_Left] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);
  mRecommendedEyeFOV[Eye_Right] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);

  mMaximumEyeFOV[Eye_Left] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);
  mMaximumEyeFOV[Eye_Right] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);

  SetFOV(mRecommendedEyeFOV[Eye_Left], mRecommendedEyeFOV[Eye_Right], 0.01, 10000.0);
}

bool
HMDInfoCardboard::StartSensorTracking()
{
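  // Sensor tracking is reference counted: observers are registered on the
  // first start and unregistered when StopSensorTracking() brings the count
  // back to zero.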
  LOG("HMDInfoCardboard::StartSensorTracking %d\n", mStartCount);
  if (mStartCount == 0) {
    // First start: capture the initial screen configuration and register our
    // sensor and screen-configuration observers.

    mozilla::hal::ScreenConfiguration sconfig;
    mozilla::hal::GetCurrentScreenConfiguration(&sconfig);
    this->Notify(sconfig);

    mozilla::hal::RegisterSensorObserver(mozilla::hal::SENSOR_GAME_ROTATION_VECTOR, this);
    mozilla::hal::RegisterScreenConfigurationObserver(this);

    mLastSensorState.Clear();
  }

  mStartCount++;
  return true;
}

void
HMDInfoCardboard::Notify(const mozilla::hal::ScreenConfiguration& config)
{
  mOrient = config.orientation();

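  // mScreenTransform is a rotation about the Z axis (0, ±90 or 180 degrees,
  // expressed as a quaternion) that compensates for how the screen is turned
  // relative to the device's natural portrait orientation.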
  if (mOrient == eScreenOrientation_LandscapePrimary) {
    mScreenTransform = Quaternion(0.f, 0.f, M_SQRT1_2, M_SQRT1_2);
  } else if (mOrient == eScreenOrientation_LandscapeSecondary) {
    mScreenTransform = Quaternion(0.f, 0.f, -M_SQRT1_2, M_SQRT1_2);
  } else if (mOrient == eScreenOrientation_PortraitPrimary) {
    mScreenTransform = Quaternion();
  } else if (mOrient == eScreenOrientation_PortraitSecondary) {
    mScreenTransform = Quaternion(0.f, 0.f, 1.f, 0.f);
  }
}

void
HMDInfoCardboard::Notify(const mozilla::hal::SensorData& data)
{
  if (data.sensor() != mozilla::hal::SENSOR_GAME_ROTATION_VECTOR)
    return;

  const nsTArray<float>& sensorValues = data.values();

  // This is super chatty
  //LOG("HMDInfoCardboard::Notify %f %f %f %f\n", sensorValues[0], sensorValues[1], sensorValues[2], sensorValues[3]);

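  // Just cache the latest sample; the full head pose is computed lazily in
  // ComputeStateFromLastSensor() when GetSensorState() needs it.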
  mSavedLastSensor.Set(sensorValues[0], sensorValues[1], sensorValues[2], sensorValues[3]);
  mSavedLastSensorTime = data.timestamp();
  mNeedsSensorCompute = true;
}

void
HMDInfoCardboard::ComputeStateFromLastSensor()
{
  if (!mNeedsSensorCompute)
    return;

  // apply the zero orientation
  Quaternion q = mSensorZeroInverse * mSavedLastSensor;

  // make a matrix from the quat
  Matrix4x4 qm;
  qm.SetRotationFromQuaternion(q);

  // remap the coordinate space, based on the orientation
  Matrix4x4 qmRemapped = RemapMatrixForOrientation(mOrient, qm);

  // turn it back into a quat
  q.SetFromRotationMatrix(qmRemapped);

  // apply the screen-orientation transform, which accounts for how the screen
  // is rotated relative to the sensor's base (portrait) coordinate space
  q = mScreenTransform * q;

  VRHMDSensorState& state = mLastSensorState;

  state.flags |= State_Orientation;
  state.orientation[0] = q.x;
  state.orientation[1] = q.y;
  state.orientation[2] = q.z;
  state.orientation[3] = q.w;

  state.timestamp = mSavedLastSensorTime / 1000000.0;

  mNeedsSensorCompute = false;
}

VRHMDSensorState
HMDInfoCardboard::GetSensorState(double timeOffset)
{
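  // The requested timeOffset is ignored: no pose prediction is done, we just
  // return the most recently computed state.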
  ComputeStateFromLastSensor();
  return mLastSensorState;
}

void
HMDInfoCardboard::StopSensorTracking()
{
  LOG("HMDInfoCardboard::StopSensorTracking, count %d\n", mStartCount);
  MOZ_ASSERT(mStartCount > 0, "StopSensorTracking called more often than StartSensorTracking");
  if (--mStartCount == 0) {
    mozilla::hal::UnregisterScreenConfigurationObserver(this);
    mozilla::hal::UnregisterSensorObserver(mozilla::hal::SENSOR_GAME_ROTATION_VECTOR, this);
  }
}

void
HMDInfoCardboard::ZeroSensor()
{
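  // Capture the current orientation as the new zero reference; its inverse is
  // applied to every subsequent sample in ComputeStateFromLastSensor().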
  mSensorZeroInverse = mSavedLastSensor;
  mSensorZeroInverse.Invert();
}

static Matrix4x4
ConstructProjectionMatrix(const VRFieldOfView& fov, bool rightHanded, double zNear, double zFar)
{
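  // Build an off-axis perspective projection from the tangents of the four
  // half-angles of the field of view.  The matrix entries are written
  // row-major through a float pointer into the Matrix4x4.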
  float upTan = tan(fov.upDegrees * M_PI / 180.0);
  float downTan = tan(fov.downDegrees * M_PI / 180.0);
  float leftTan = tan(fov.leftDegrees * M_PI / 180.0);
  float rightTan = tan(fov.rightDegrees * M_PI / 180.0);

  float handednessScale = rightHanded ? -1.0 : 1.0;

  float pxscale = 2.0f / (leftTan + rightTan);
  float pxoffset = (leftTan - rightTan) * pxscale * 0.5;
  float pyscale = 2.0f / (upTan + downTan);
  float pyoffset = (upTan - downTan) * pyscale * 0.5;

  Matrix4x4 mobj;
  float *m = &mobj._11;

  m[0*4+0] = pxscale;
  m[0*4+2] = pxoffset * handednessScale;

  m[1*4+1] = pyscale;
  m[1*4+2] = -pyoffset * handednessScale;

  m[2*4+2] = zFar / (zNear - zFar) * -handednessScale;
  m[2*4+3] = (zFar * zNear) / (zNear - zFar);

  m[3*4+2] = handednessScale;
  // Matrix4x4 default-constructs to identity, so explicitly clear _44 to get a
  // proper perspective divide (w' depends only on z).
  m[3*4+3] = 0.0f;

  return mobj;
}

bool
HMDInfoCardboard::SetFOV(const VRFieldOfView& aFOVLeft,
                         const VRFieldOfView& aFOVRight,
                         double zNear, double zFar)
{
  const float standardIPD = 0.064f;

  for (uint32_t eye = 0; eye < NumEyes; eye++) {
    mEyeFOV[eye] = eye == Eye_Left ? aFOVLeft : aFOVRight;
    // Each eye sits half the interpupillary distance to either side of center.
    mEyeTranslation[eye] = Point3D(standardIPD * 0.5 * (eye == Eye_Left ? -1.0 : 1.0), 0.0, 0.0);
    mEyeProjectionMatrix[eye] = ConstructProjectionMatrix(mEyeFOV[eye], true, zNear, zFar);

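    // There is no real lens-distortion correction here: each eye's "mesh" is a
    // two-triangle quad covering that eye's half of the viewport, with
    // pass-through texture coordinates (the left eye samples x in [0, 0.5],
    // the right eye in [0.5, 1]).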
    mDistortionMesh[eye].mVertices.SetLength(4);
    mDistortionMesh[eye].mIndices.SetLength(6);

    HMDInfoCardboard::DistortionVertex *destv =
      reinterpret_cast<HMDInfoCardboard::DistortionVertex*>(mDistortionMesh[eye].mVertices.Elements());
    float xoffs = eye == Eye_Left ? 0.0f : 1.0f;
    float txoffs = eye == Eye_Left ? 0.0f : 0.5f;
    destv[0].pos[0] = -1.0 + xoffs;
    destv[0].pos[1] = -1.0;
    destv[0].texR[0] = destv[0].texG[0] = destv[0].texB[0] = 0.0 + txoffs;
    destv[0].texR[1] = destv[0].texG[1] = destv[0].texB[1] = 1.0;
    destv[0].padding[0] = 1.0; // vignette factor

    destv[1].pos[0] = 0.0 + xoffs;
    destv[1].pos[1] = -1.0;
    destv[1].texR[0] = destv[1].texG[0] = destv[1].texB[0] = 0.5 + txoffs;
    destv[1].texR[1] = destv[1].texG[1] = destv[1].texB[1] = 1.0;
    destv[1].padding[0] = 1.0; // vignette factor

    destv[2].pos[0] = 0.0 + xoffs;
    destv[2].pos[1] = 1.0;
    destv[2].texR[0] = destv[2].texG[0] = destv[2].texB[0] = 0.5 + txoffs;
    destv[2].texR[1] = destv[2].texG[1] = destv[2].texB[1] = 0.0;
    destv[2].padding[0] = 1.0; // vignette factor

    destv[3].pos[0] = -1.0 + xoffs;
    destv[3].pos[1] = 1.0;
    destv[3].texR[0] = destv[3].texG[0] = destv[3].texB[0] = 0.0 + txoffs;
    destv[3].texR[1] = destv[3].texG[1] = destv[3].texB[1] = 0.0;
    destv[3].padding[0] = 1.0; // vignette factor

    uint16_t *iv = mDistortionMesh[eye].mIndices.Elements();
    iv[0] = 0; iv[1] = 1; iv[2] = 2;
    iv[3] = 2; iv[4] = 3; iv[5] = 0;
  }

  // XXX find out the default screen size and use that
  mEyeResolution.width = 1920 / 2;
  mEyeResolution.height = 1080;

  mConfiguration.hmdType = mType;
  mConfiguration.value = 0;
  mConfiguration.fov[0] = aFOVLeft;
  mConfiguration.fov[1] = aFOVRight;

  return true;
}

void
HMDInfoCardboard::FillDistortionConstants(uint32_t whichEye,
                                          const IntSize& textureSize, const IntRect& eyeViewport,
                                          const Size& destViewport, const Rect& destRect,
                                          VRDistortionConstants& values)
{
  // these modify the texture coordinates; texcoord * zw + xy
  values.eyeToSourceScaleAndOffset[0] = 0.0;
  values.eyeToSourceScaleAndOffset[1] = 0.0;
  values.eyeToSourceScaleAndOffset[2] = 1.0;
  values.eyeToSourceScaleAndOffset[3] = 1.0;
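  // i.e. a scale of 1 and an offset of 0: the texture coordinates pass through
  // unchanged, since there is no distortion resampling for Cardboard.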

  // Our mesh positions are in [-1..1] clip space; compute the offset and scale
  // that map them into the destination viewport.

  // this is the destRect in clip space
  float x0 = destRect.x / destViewport.width * 2.0 - 1.0;
  float x1 = (destRect.x + destRect.width) / destViewport.width * 2.0 - 1.0;

  float y0 = destRect.y / destViewport.height * 2.0 - 1.0;
  float y1 = (destRect.y + destRect.height) / destViewport.height * 2.0 - 1.0;

  // offset
  values.destinationScaleAndOffset[0] = (x0+x1) / 2.0;
  values.destinationScaleAndOffset[1] = (y0+y1) / 2.0;
  // scale
  values.destinationScaleAndOffset[2] = destRect.width / destViewport.width;
  values.destinationScaleAndOffset[3] = destRect.height / destViewport.height;
}

void
HMDInfoCardboard::Destroy()
{
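  // Nothing to tear down here; sensor and screen observers are unregistered in
  // StopSensorTracking().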
}



bool
VRHMDManagerCardboard::PlatformInit()
{
  return true;
}

bool
VRHMDManagerCardboard::Init()
{
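  // There is no hardware to enumerate; we always create a single HMD backed by
  // the phone's orientation sensor.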
  if (mCardboardInitialized)
    return true;

  nsRefPtr<HMDInfoCardboard> hmd = new HMDInfoCardboard();
  mCardboardHMDs.AppendElement(hmd);

  mCardboardInitialized = true;
  return true;
}

void
VRHMDManagerCardboard::Destroy()
{
  if (!mCardboardInitialized)
    return;

  for (size_t i = 0; i < mCardboardHMDs.Length(); ++i) {
    mCardboardHMDs[i]->Destroy();
  }

  mCardboardHMDs.Clear();
  mCardboardInitialized = false;
}

void
VRHMDManagerCardboard::GetHMDs(nsTArray<nsRefPtr<VRHMDInfo>>& aHMDResult)
{
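  // Lazily initialize on first use; Init() is a no-op once
  // mCardboardInitialized is set.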
  Init();
  for (size_t i = 0; i < mCardboardHMDs.Length(); ++i) {
    aHMDResult.AppendElement(mCardboardHMDs[i]);
  }
}