[PATCH 03/14] Bug 1179944, [webvr] support Oculus 0.6.0 runtime and rendering; r=mstange
author      Vladimir Vukicevic <vladimir@pobox.com>
date        Thu, 02 Jul 2015 11:58:24 -0400
changeset   504516 e4da19c5227b4d5e30cff0283d1cf4e6fe841650
parent      504515 a22bef86b7779d1ba3499b6fc9a50ac447877788
child       504517 54d48c16fdd0151a74bce73d06d3c273dd12945c
push id     78004
push user   vladimir@pobox.com
push date   Mon, 13 Jul 2015 16:41:29 +0000
treeherder  try@0c66038689f2
bugs        1179944
milestone   42.0a1
From cd510fa2e4314b1368014043d3ca490bbb0369d1 Mon Sep 17 00:00:00 2001
---
 gfx/2d/Quaternion.h                              |  10 +
 gfx/layers/Layers.h                              |   6 +-
 gfx/layers/composite/ContainerLayerComposite.cpp | 163 +++++++---
 gfx/layers/composite/ContainerLayerComposite.h   |   3 +
 gfx/thebes/gfxPrefs.h                            |   2 +
 gfx/vr/gfxVR.cpp                                 |  56 ++++
 gfx/vr/gfxVR.h                                   |  37 +++
 gfx/vr/gfxVRCardboard.cpp                        |  54 +---
 gfx/vr/gfxVROculus.cpp                           | 367 +++++++++++++++--------
 gfx/vr/gfxVROculus.h                             |  14 +-
 gfx/vr/moz.build                                 |  10 +
 gfx/vr/ovr_capi_dynamic.h                        | 261 +++++++++++-----
 modules/libpref/init/all.js                      |   3 +
 13 files changed, 696 insertions(+), 290 deletions(-)
--- a/gfx/2d/Quaternion.h
+++ b/gfx/2d/Quaternion.h
@@ -87,14 +87,24 @@ public:
     }
     return *this;
   }
 
   Quaternion& Invert()
   {
     return Conjugate().Normalize();
   }
+
+  Point3D RotatePoint(const Point3D& aPoint) {
+    Float uvx = Float(2.0) * (y*aPoint.z - z*aPoint.y);
+    Float uvy = Float(2.0) * (z*aPoint.x - x*aPoint.z);
+    Float uvz = Float(2.0) * (x*aPoint.y - y*aPoint.x);
+
+    return Point3D(aPoint.x + w*uvx + y*uvz - z*uvy,
+                   aPoint.y + w*uvy + z*uvx - x*uvz,
+                   aPoint.z + w*uvz + x*uvy - y*uvx);
+  }
 };
 
 } // namespace gfx
 } // namespace mozilla
 
 #endif
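
Note on the new Quaternion::RotatePoint above (an illustrative aside, not part of the patch): for a unit quaternion q = (x, y, z, w) it computes t = 2 * (q.xyz x v) and then v' = v + w*t + (q.xyz x t), which is the usual expansion of q * v * conj(q) without building a rotation matrix. A self-contained check of that same algebra, with every name local to the sketch:

#include <cmath>
#include <cstdio>

struct V3 { float x, y, z; };
struct Q4 { float x, y, z, w; };

static V3 Cross(const V3& a, const V3& b) {
  return { a.y*b.z - a.z*b.y, a.z*b.x - a.x*b.z, a.x*b.y - a.y*b.x };
}

// Same algebra as Quaternion::RotatePoint: t = 2 * cross(q.xyz, v),
// then v' = v + w*t + cross(q.xyz, t).  Valid for unit quaternions.
static V3 Rotate(const Q4& q, const V3& v) {
  V3 t = Cross({q.x, q.y, q.z}, v);
  t = { 2*t.x, 2*t.y, 2*t.z };
  V3 c = Cross({q.x, q.y, q.z}, t);
  return { v.x + q.w*t.x + c.x, v.y + q.w*t.y + c.y, v.z + q.w*t.z + c.z };
}

int main() {
  const float s = std::sqrt(0.5f);           // 90 degrees about +Z
  V3 r = Rotate({0, 0, s, s}, {1, 0, 0});    // +X should land on +Y
  std::printf("%.3f %.3f %.3f\n", r.x, r.y, r.z);  // ~0.000 1.000 0.000
  return 0;
}
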
--- a/gfx/layers/Layers.h
+++ b/gfx/layers/Layers.h
@@ -1673,17 +1673,17 @@ public:
    * returning the old one.  This is currently added as a hack for VR
    * rendering, and might go away if we find a better way to do this.
    * If you think you have a need for this method, talk with
    * vlad/mstange/mwoodrow first.
    */
   virtual gfx::Matrix4x4 ReplaceEffectiveTransform(const gfx::Matrix4x4& aNewEffectiveTransform) {
     gfx::Matrix4x4 old = mEffectiveTransform;
     mEffectiveTransform = aNewEffectiveTransform;
-    ComputeEffectiveTransformForMaskLayer(mEffectiveTransform);
+    ComputeEffectiveTransformForMaskLayers(mEffectiveTransform);
     return old;
   }
 
 protected:
   Layer(LayerManager* aManager, void* aImplData);
 
   // Protected destructor, to discourage deletion outside of Release():
   virtual ~Layer();
@@ -2067,17 +2067,17 @@ public:
    * rendering, and might go away if we find a better way to do this.
    * If you think you have a need for this method, talk with
    * vlad/mstange/mwoodrow first.
    */
   gfx::Matrix4x4 ReplaceEffectiveTransform(const gfx::Matrix4x4& aNewEffectiveTransform) override {
     gfx::Matrix4x4 old = mEffectiveTransform;
     mEffectiveTransform = aNewEffectiveTransform;
     ComputeEffectiveTransformsForChildren(mEffectiveTransform);
-    ComputeEffectiveTransformForMaskLayer(mEffectiveTransform);
+    ComputeEffectiveTransformForMaskLayers(mEffectiveTransform);
     return old;
   }
 
 protected:
   friend class ReadbackProcessor;
 
   void DidInsertChild(Layer* aLayer);
   void DidRemoveChild(Layer* aLayer);
@@ -2276,16 +2276,18 @@ public:
    */
   typedef void PreTransactionCallback(void* closureData);
   void SetPreTransactionCallback(PreTransactionCallback* callback, void* closureData)
   {
     mPreTransCallback = callback;
     mPreTransCallbackData = closureData;
   }
 
+  const nsIntRect& GetBounds() const { return mBounds; }
+
 protected:
   void FirePreTransactionCallback()
   {
     if (mPreTransCallback) {
       mPreTransCallback(mPreTransCallbackData);
     }
   }
 
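
The Layers.h changes above (switching ReplaceEffectiveTransform to ComputeEffectiveTransformForMaskLayers, and the new CanvasLayer::GetBounds accessor) exist to support ContainerRenderVR later in this patch. A sketch of the intended save/restore pattern (illustrative only; it assumes the mozilla::layers and mozilla::gfx types from this tree, and the helper name is hypothetical):

// Hypothetical helper mirroring how ContainerRenderVR uses the hook below:
// swap in a prescaled transform for one child layer, render it, then put the
// original back so the layer tree is left untouched.
static void RenderChildWithPrescale(mozilla::layers::Layer* aLayer,
                                    mozilla::layers::LayerComposite* aComposite,
                                    const mozilla::gfx::IntRect& aSurfaceRect,
                                    float aScaleX, float aScaleY)
{
  using mozilla::gfx::Matrix4x4;

  const Matrix4x4 original = aLayer->GetEffectiveTransform();

  Matrix4x4 scaled(original);
  scaled.PreScale(aScaleX, aScaleY, 1.0f);
  aLayer->ReplaceEffectiveTransform(scaled);   // also recomputes mask-layer transforms

  aComposite->Prepare(mozilla::layers::RenderTargetIntRect(
      aSurfaceRect.x, aSurfaceRect.y, aSurfaceRect.width, aSurfaceRect.height));
  aComposite->RenderLayer(aSurfaceRect);

  aLayer->ReplaceEffectiveTransform(original); // restore
}
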
--- a/gfx/layers/composite/ContainerLayerComposite.cpp
+++ b/gfx/layers/composite/ContainerLayerComposite.cpp
@@ -36,16 +36,21 @@
 #include "GeckoProfiler.h"              // for GeckoProfiler
 #ifdef MOZ_ENABLE_PROFILER_SPS
 #include "ProfilerMarkers.h"            // for ProfilerMarkers
 #endif
 
 #define CULLING_LOG(...)
 // #define CULLING_LOG(...) printf_stderr("CULLING: " __VA_ARGS__)
 
+#define DUMP(...) do { if (getenv("DUMP_DEBUG")) { printf_stderr(__VA_ARGS__); } } while(0)
+#define XYWH(k)  (k).x, (k).y, (k).width, (k).height
+#define XY(k)    (k).x, (k).y
+#define WH(k)    (k).width, (k).height
+
 namespace mozilla {
 namespace layers {
 
 using namespace gfx;
 
 static bool
 LayerHasCheckerboardingAPZC(Layer* aLayer, gfxRGBA* aOutColor)
 {
@@ -137,100 +142,182 @@ ContainerRenderVR(ContainerT* aContainer
                   gfx::VRHMDInfo* aHMD)
 {
   RefPtr<CompositingRenderTarget> surface;
 
   Compositor* compositor = aManager->GetCompositor();
 
   RefPtr<CompositingRenderTarget> previousTarget = compositor->GetCurrentRenderTarget();
 
-  gfx::IntRect visibleRect = aContainer->GetEffectiveVisibleRegion().GetBounds();
-
   float opacity = aContainer->GetEffectiveOpacity();
 
-  gfx::IntRect surfaceRect = gfx::IntRect(visibleRect.x, visibleRect.y,
-                                          visibleRect.width, visibleRect.height);
-  // we're about to create a framebuffer backed by textures to use as an intermediate
-  // surface. What to do if its size (as given by framebufferRect) would exceed the
-  // maximum texture size supported by the GL? The present code chooses the compromise
-  // of just clamping the framebuffer's size to the max supported size.
-  // This gives us a lower resolution rendering of the intermediate surface (children layers).
-  // See bug 827170 for a discussion.
+  // The size of each individual eye surface
+  gfx::IntSize eyeResolution = aHMD->SuggestedEyeResolution();
+  gfx::IntRect eyeRect[2];
+  eyeRect[0] = gfx::IntRect(0, 0, eyeResolution.width, eyeResolution.height);
+  eyeRect[1] = gfx::IntRect(eyeResolution.width, 0, eyeResolution.width, eyeResolution.height);
+
+  // The intermediate surface size; we don't expect to hit the max texture
+  // size limit here, but clamp to it below just in case.
+  gfx::IntRect surfaceRect = gfx::IntRect(0, 0, eyeResolution.width * 2, eyeResolution.height);
+
   int32_t maxTextureSize = compositor->GetMaxTextureSize();
   surfaceRect.width = std::min(maxTextureSize, surfaceRect.width);
   surfaceRect.height = std::min(maxTextureSize, surfaceRect.height);
 
-  // use NONE here, because we draw black to clear below
-  surface = compositor->CreateRenderTarget(surfaceRect, INIT_MODE_NONE);
-  if (!surface) {
-    return;
+  gfx::VRHMDRenderingSupport *vrRendering = aHMD->GetRenderingSupport();
+  if (PR_GetEnv("NO_VR_RENDERING")) vrRendering = nullptr;
+  if (vrRendering) {
+    if (!aContainer->mVRRenderTargetSet || aContainer->mVRRenderTargetSet->size != surfaceRect.Size()) {
+      aContainer->mVRRenderTargetSet = vrRendering->CreateRenderTargetSet(compositor, surfaceRect.Size());
+    }
+
+    surface = aContainer->mVRRenderTargetSet->GetNextRenderTarget();
+    if (!surface) {
+      NS_WARNING("GetNextRenderTarget failed");
+      return;
+    }
+  } else {
+    surface = compositor->CreateRenderTarget(surfaceRect, INIT_MODE_CLEAR);
+    if (!surface) {
+      return;
+    }
   }
 
+  // Note: rtBounds is used again further down (outside any DEBUG-only code)
+  // to compute the mirror scale, so don't guard this with #ifdef DEBUG.
+  gfx::IntRect rtBounds = previousTarget->GetRect();
+  DUMP("eyeResolution: %d %d targetRT: %d %d %d %d\n", WH(eyeResolution), XYWH(rtBounds));
+
   compositor->SetRenderTarget(surface);
 
   nsAutoTArray<Layer*, 12> children;
   aContainer->SortChildrenBy3DZOrder(children);
 
-  /**
-   * Render this container's contents.
-   */
-  gfx::IntRect surfaceClipRect(0, 0, surfaceRect.width, surfaceRect.height);
-  RenderTargetIntRect rtClipRect(0, 0, surfaceRect.width, surfaceRect.height);
+  gfx::Matrix4x4 origTransform = aContainer->GetEffectiveTransform();
+
   for (uint32_t i = 0; i < children.Length(); i++) {
     LayerComposite* layerToRender = static_cast<LayerComposite*>(children.ElementAt(i)->ImplData());
     Layer* layer = layerToRender->GetLayer();
+    uint32_t contentFlags = layer->GetContentFlags();
 
     if (layer->GetEffectiveVisibleRegion().IsEmpty() &&
         !layer->AsContainerLayer()) {
       continue;
     }
 
-    RenderTargetIntRect clipRect = layer->CalculateScissorRect(rtClipRect);
-    if (clipRect.IsEmpty()) {
-      continue;
+    // We flip between pre-rendered and Gecko-rendered VR based on whether
+    // the child layer of this VR container layer has PRESERVE_3D or not.
+    if ((contentFlags & Layer::CONTENT_PRESERVE_3D) == 0) {
+      // This layer is native VR
+      DUMP("%p Switching to pre-rendered VR\n", aContainer);
+
+      // XXX we still need depth test here, but we have no way of preserving
+      // depth anyway in native VR layers until we have a way to save them
+      // from WebGL (and maybe depth video?)
+      compositor->SetRenderTarget(surface);
+      aContainer->ReplaceEffectiveTransform(origTransform);
+      
+      // If this native-VR child layer does not have sizes that match
+      // the eye resolution (that is, returned by the recommended
+      // render rect from the HMD device), then we need to scale it
+      // up/down.
+      nsIntRect layerBounds;
+      // XXX this is a hack! Canvas layers aren't reporting the
+      // proper bounds here (visible region bounds are 0,0,0,0)
+      // and I'm not sure if this is the bounds we want anyway.
+      if (layer->GetType() == Layer::TYPE_CANVAS) {
+        layerBounds = static_cast<CanvasLayer*>(layer)->GetBounds();
+      } else {
+        layerBounds = layer->GetEffectiveVisibleRegion().GetBounds();
+      }
+      DUMP("  layer %p [type %d] bounds [%d %d %d %d] surfaceRect [%d %d %d %d]\n", layer, (int) layer->GetType(),
+           XYWH(layerBounds), XYWH(surfaceRect));
+      
+      const gfx::Matrix4x4 childTransform = layer->GetEffectiveTransform();
+      bool restoreTransform = false;
+      if ((layerBounds.width != 0 && layerBounds.height != 0) &&
+          (layerBounds.width != surfaceRect.width ||
+           layerBounds.height != surfaceRect.height))
+      {
+        DUMP("  layer %p doesn't match, prescaling by %f %f\n", layer,
+             surfaceRect.width / float(layerBounds.width),
+             surfaceRect.height / float(layerBounds.height));
+        gfx::Matrix4x4 scaledChildTransform(childTransform);
+        scaledChildTransform.PreScale(surfaceRect.width / float(layerBounds.width),
+                                      surfaceRect.height / float(layerBounds.height),
+                                      1.0f);
+
+        layer->ReplaceEffectiveTransform(scaledChildTransform);
+        restoreTransform = true;
+      }
+
+      // XXX these are both clip rects, which end up as scissor rects in the compositor.  So we just
+      // pass the full target surface rect here.
+      layerToRender->Prepare(RenderTargetIntRect(surfaceRect.x, surfaceRect.y,
+                                                 surfaceRect.width, surfaceRect.height));
+      layerToRender->RenderLayer(surfaceRect);
+
+      if (restoreTransform) {
+        layer->ReplaceEffectiveTransform(childTransform);
+      }
+    } else {
+      // Gecko-rendered CSS VR -- not supported yet, so just don't render this layer!
     }
-
-    layerToRender->Prepare(rtClipRect);
-    layerToRender->RenderLayer(surfaceClipRect);
   }
 
-  // Unbind the current surface and rebind the previous one.
-#ifdef MOZ_DUMP_PAINTING
-  if (gfxUtils::sDumpPainting) {
-    RefPtr<gfx::DataSourceSurface> surf = surface->Dump(aManager->GetCompositor());
-    if (surf) {
-      WriteSnapshotToDumpFile(aContainer, surf);
+  DUMP(" -- ContainerRenderVR [%p] after child layers\n", aContainer);
+
+  // Now put back the original transform on this container
+  aContainer->ReplaceEffectiveTransform(origTransform);
+
+  // then bind the original target and draw with distortion
+  compositor->SetRenderTarget(previousTarget);
+
+  if (vrRendering) {
+    vrRendering->SubmitFrame(aContainer->mVRRenderTargetSet);
+    DUMP("<<< ContainerRenderVR [used vrRendering] [%p]\n", aContainer);
+    if (!gfxPrefs::VRMirrorTextures()) {
+      return;
     }
   }
-#endif
 
-  compositor->SetRenderTarget(previousTarget);
-
-  gfx::Rect rect(visibleRect.x, visibleRect.y, visibleRect.width, visibleRect.height);
+  gfx::Rect rect(surfaceRect.x, surfaceRect.y, surfaceRect.width, surfaceRect.height);
   gfx::Rect clipRect(aClipRect.x, aClipRect.y, aClipRect.width, aClipRect.height);
 
   // The VR geometry may not cover the entire area; we need to fill with a solid color
   // first.
   // XXX should DrawQuad handle this on its own?  Is there a time where we wouldn't want
   // to do this? (e.g. something like Cardboard would not require distortion so will fill
   // the entire rect)
   EffectChain solidEffect(aContainer);
   solidEffect.mPrimaryEffect = new EffectSolidColor(Color(0.0, 0.0, 0.0, 1.0));
-  aManager->GetCompositor()->DrawQuad(rect, clipRect, solidEffect, opacity,
-                                      aContainer->GetEffectiveTransform());
+  aManager->GetCompositor()->DrawQuad(rect, rect, solidEffect, 1.0, gfx::Matrix4x4());
 
   // draw the temporary surface with VR distortion to the original destination
+  gfx::Matrix4x4 scaleTransform = aContainer->GetEffectiveTransform();
   EffectChain vrEffect(aContainer);
-  vrEffect.mPrimaryEffect = new EffectVRDistortion(aHMD, surface);
+  bool skipDistortion = vrRendering || PR_GetEnv("MOZ_GFX_VR_NO_DISTORTION");
+  if (skipDistortion) {
+    vrEffect.mPrimaryEffect = new EffectRenderTarget(surface);
+    scaleTransform.PreScale(rtBounds.width / float(surfaceRect.width),
+                            rtBounds.height / float(surfaceRect.height),
+                            1.0f);
+  } else {
+    vrEffect.mPrimaryEffect = new EffectVRDistortion(aHMD, surface);
+    // no need to scale, because the VRDistortion effect will sample from surface
+  }
 
   // XXX we shouldn't use visibleRect here -- the VR distortion needs to know the
   // full rect, not just the visible one.  Luckily, right now, VR distortion is only
   // rendered when the element is fullscreen, so the visibleRect will be right anyway.
   aManager->GetCompositor()->DrawQuad(rect, clipRect, vrEffect, opacity,
-                                      aContainer->GetEffectiveTransform());
+                                      scaleTransform);
+
+  DUMP("<<< ContainerRenderVR [%p]\n", aContainer);
 }
 
 /* all of the prepared data that we need in RenderLayer() */
 struct PreparedData
 {
   RefPtr<CompositingRenderTarget> mTmpTarget;
   nsAutoTArray<PreparedLayer, 12> mLayers;
   bool mNeedsSurfaceCopy;
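
For orientation, the intermediate-surface layout ContainerRenderVR sets up above, as a standalone sketch (the numbers are invented, not taken from any real HMD): two eye-sized viewports side by side, clamped to the compositor's max texture size, with a native-VR child prescaled to cover the whole surface when its bounds differ:

#include <algorithm>
#include <cstdio>

struct RectI { int x, y, width, height; };

int main() {
  const int eyeW = 1332, eyeH = 1586;   // hypothetical SuggestedEyeResolution()
  const int maxTextureSize = 16384;

  RectI surface{0, 0, std::min(maxTextureSize, eyeW * 2),
                      std::min(maxTextureSize, eyeH)};
  RectI eye[2] = {{0, 0, eyeW, eyeH}, {eyeW, 0, eyeW, eyeH}};

  // A native-VR child (e.g. a WebGL canvas) rendered at a different size is
  // prescaled so it fills the whole side-by-side surface.
  RectI layerBounds{0, 0, 1920, 1080};
  float sx = surface.width / float(layerBounds.width);
  float sy = surface.height / float(layerBounds.height);

  std::printf("surface %dx%d, eye viewports at x=%d and x=%d, prescale %.3f x %.3f\n",
              surface.width, surface.height, eye[0].x, eye[1].x, sx, sy);
  return 0;
}
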
--- a/gfx/layers/composite/ContainerLayerComposite.h
+++ b/gfx/layers/composite/ContainerLayerComposite.h
@@ -6,16 +6,17 @@
 #ifndef GFX_ContainerLayerComposite_H
 #define GFX_ContainerLayerComposite_H
 
 #include "Layers.h"                     // for Layer (ptr only), etc
 #include "mozilla/Attributes.h"         // for override
 #include "mozilla/UniquePtr.h"          // for UniquePtr
 #include "mozilla/layers/LayerManagerComposite.h"
 #include "mozilla/gfx/Rect.h"
+#include "gfxVR.h"
 
 namespace mozilla {
 namespace layers {
 
 class CompositableHost;
 class CompositingRenderTarget;
 struct PreparedData;
 
@@ -107,16 +108,17 @@ public:
     }
     return mPostYScale;
   }
 
   virtual const char* Name() const override { return "ContainerLayerComposite"; }
   UniquePtr<PreparedData> mPrepared;
 
   RefPtr<CompositingRenderTarget> mLastIntermediateSurface;
+  RefPtr<gfx::VRHMDRenderingSupport::RenderTargetSet> mVRRenderTargetSet;
 };
 
 class RefLayerComposite : public RefLayer,
                           public LayerComposite
 {
   template<class ContainerT>
   friend void ContainerPrepare(ContainerT* aContainer,
                                LayerManagerComposite* aManager,
@@ -172,14 +174,15 @@ public:
   virtual LayerComposite* AsLayerComposite() override { return this; }
 
   // ref layers don't use a compositable
   CompositableHost* GetCompositableHost() override { return nullptr; }
 
   virtual const char* Name() const override { return "RefLayerComposite"; }
   UniquePtr<PreparedData> mPrepared;
   RefPtr<CompositingRenderTarget> mLastIntermediateSurface;
+  nsRefPtr<gfx::VRHMDRenderingSupport::RenderTargetSet> mVRRenderTargetSet;
 };
 
 } // namespace layers
 } // namespace mozilla
 
 #endif /* GFX_ContainerLayerComposite_H */
--- a/gfx/thebes/gfxPrefs.h
+++ b/gfx/thebes/gfxPrefs.h
@@ -239,16 +239,18 @@ private:
   DECL_GFX_PREF(Once, "gfx.touch.resample",                    TouchResampling, bool, false);
 
   // These times should be in milliseconds
   DECL_GFX_PREF(Once, "gfx.touch.resample.delay-threshold",    TouchResampleVsyncDelayThreshold, int32_t, 20);
   DECL_GFX_PREF(Once, "gfx.touch.resample.max-predict",        TouchResampleMaxPredict, int32_t, 8);
   DECL_GFX_PREF(Once, "gfx.touch.resample.old-touch-threshold",TouchResampleOldTouchThreshold, int32_t, 17);
   DECL_GFX_PREF(Once, "gfx.touch.resample.vsync-adjust",       TouchVsyncSampleAdjust, int32_t, 5);
 
+  DECL_GFX_PREF(Once, "gfx.vr.mirror-textures",                VRMirrorTextures, bool, false);
+
   DECL_GFX_PREF(Live, "gfx.vsync.collect-scroll-transforms",   CollectScrollTransforms, bool, false);
   DECL_GFX_PREF(Once, "gfx.vsync.compositor",                  VsyncAlignedCompositor, bool, false);
   // On b2g, in really bad cases, I've seen up to 80 ms delays between touch events and the main thread
   // processing them. So 80 ms / 16 = 5 vsync events. Double it up just to be on the safe side, so 10.
   DECL_GFX_PREF(Once, "gfx.vsync.compositor.unobserve-count",  CompositorUnobserveCount, int32_t, 10);
   // Use vsync events generated by hardware
   DECL_GFX_PREF(Once, "gfx.vsync.hw-vsync.enabled",            HardwareVsyncEnabled, bool, false);
   DECL_GFX_PREF(Once, "gfx.vsync.refreshdriver",               VsyncAlignedRefreshDriver, bool, false);
--- a/gfx/vr/gfxVR.cpp
+++ b/gfx/vr/gfxVR.cpp
@@ -13,16 +13,23 @@
 #include "gfxPrefs.h"
 #include "gfxVR.h"
 #include "gfxVROculus.h"
 #include "gfxVRCardboard.h"
 
 #include "nsServiceManagerUtils.h"
 #include "nsIScreenManager.h"
 
+#include "mozilla/layers/Compositor.h"
+#include "mozilla/layers/TextureHost.h"
+
+#ifndef M_PI
+# define M_PI 3.14159265358979323846
+#endif
+
 using namespace mozilla;
 using namespace mozilla::gfx;
 
 // Dummy nsIScreen implementation, for when we just need to specify a size
 class FakeScreen : public nsIScreen
 {
 public:
   explicit FakeScreen(const IntRect& aScreenRect)
@@ -129,8 +136,57 @@ VRHMDManager::GetAllHMDs(nsTArray<nsRefP
   }
 }
 
 /* static */ uint32_t
 VRHMDManager::AllocateDeviceIndex()
 {
   return ++sDeviceBase;
 }
+
+/* static */ already_AddRefed<nsIScreen>
+VRHMDManager::MakeFakeScreen(int32_t x, int32_t y, uint32_t width, uint32_t height)
+{
+  nsCOMPtr<nsIScreen> screen = new FakeScreen(IntRect(x, y, width, height));
+  return screen.forget();
+}
+
+VRHMDRenderingSupport::RenderTargetSet::RenderTargetSet()
+  : currentRenderTarget(0)
+{
+}
+
+VRHMDRenderingSupport::RenderTargetSet::~RenderTargetSet()
+{
+}
+
+Matrix4x4
+VRFieldOfView::ConstructProjectionMatrix(float zNear, float zFar, bool rightHanded)
+{
+  float upTan = tan(upDegrees * M_PI / 180.0);
+  float downTan = tan(downDegrees * M_PI / 180.0);
+  float leftTan = tan(leftDegrees * M_PI / 180.0);
+  float rightTan = tan(rightDegrees * M_PI / 180.0);
+
+  float handednessScale = rightHanded ? -1.0 : 1.0;
+
+  float pxscale = 2.0f / (leftTan + rightTan);
+  float pxoffset = (leftTan - rightTan) * pxscale * 0.5;
+  float pyscale = 2.0f / (upTan + downTan);
+  float pyoffset = (upTan - downTan) * pyscale * 0.5;
+
+  Matrix4x4 mobj;
+  float *m = &mobj._11;
+
+  m[0*4+0] = pxscale;
+  m[2*4+0] = pxoffset * handednessScale;
+
+  m[1*4+1] = pyscale;
+  m[2*4+1] = -pyoffset * handednessScale;
+
+  m[2*4+2] = zFar / (zNear - zFar) * -handednessScale;
+  m[3*4+2] = (zFar * zNear) / (zNear - zFar);
+
+  m[2*4+3] = handednessScale;
+  m[3*4+3] = 0.0f;
+
+  return mobj;
+}
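
The shared VRFieldOfView::ConstructProjectionMatrix above builds an off-axis projection from the four FOV half-angle tangents, laid out for Matrix4x4's row-vector convention (hence the offset and w terms live in the third row rather than the third column). A standalone numeric check of that layout, illustrative only:

#include <cmath>
#include <cstdio>

int main() {
  const double up = 1.0, down = 1.0, left = 1.0, right = 1.0; // tan(45 deg)
  const double zNear = 0.01, zFar = 10000.0, hs = -1.0;       // right-handed

  // Same element layout as ConstructProjectionMatrix above (m[row][col]).
  double m[4][4] = {};
  m[0][0] = 2.0 / (left + right);
  m[2][0] = (left - right) * m[0][0] * 0.5 * hs;
  m[1][1] = 2.0 / (up + down);
  m[2][1] = -(up - down) * m[1][1] * 0.5 * hs;
  m[2][2] = zFar / (zNear - zFar) * -hs;
  m[3][2] = (zFar * zNear) / (zNear - zFar);
  m[2][3] = hs;

  // Row-vector convention: clip = [x y z 1] * M.
  auto project = [&](double x, double y, double z, double out[4]) {
    for (int c = 0; c < 4; ++c)
      out[c] = x * m[0][c] + y * m[1][c] + z * m[2][c] + 1.0 * m[3][c];
  };

  double clip[4];
  project(-zNear /* point on the left frustum edge */, 0.0, -zNear, clip);
  std::printf("left edge at near: ndc x = %f, depth = %f\n",
              clip[0] / clip[3], clip[2] / clip[3]);          // -1, ~0
  project(0.0, 0.0, -zFar, clip);
  std::printf("far plane: depth = %f\n", clip[2] / clip[3]);  // ~1
  return 0;
}
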
--- a/gfx/vr/gfxVR.h
+++ b/gfx/vr/gfxVR.h
@@ -12,16 +12,21 @@
 #include "nsCOMPtr.h"
 #include "nsRefPtr.h"
 
 #include "mozilla/gfx/2D.h"
 #include "mozilla/EnumeratedArray.h"
 #include "mozilla/Atomics.h"
 
 namespace mozilla {
+namespace layers {
+class Compositor;
+class CompositingRenderTarget;
+}
+
 namespace gfx {
 
 enum class VRHMDType : uint16_t {
   Oculus,
   Cardboard,
   NumHMDTypes
 };
 
@@ -53,16 +58,18 @@ struct VRFieldOfView {
 
   bool IsZero() const {
     return upDegrees == 0.0 ||
       rightDegrees == 0.0 ||
       downDegrees == 0.0 ||
       leftDegrees == 0.0;
   }
 
+  Matrix4x4 ConstructProjectionMatrix(float zNear, float zFar, bool rightHanded);
+
   double upDegrees;
   double rightDegrees;
   double downDegrees;
   double leftDegrees;
 };
 
 // 12 floats per vertex. Position, tex coordinates
 // for each channel, and 4 generic attributes
@@ -120,16 +127,40 @@ struct VRHMDConfiguration {
     return hmdType != VRHMDType::NumHMDTypes;
   }
 
   VRHMDType hmdType;
   uint32_t value;
   VRFieldOfView fov[2];
 };
 
+class VRHMDRenderingSupport {
+public:
+  struct RenderTargetSet {
+    RenderTargetSet();
+    
+    NS_INLINE_DECL_REFCOUNTING(RenderTargetSet)
+
+    nsRefPtr<layers::Compositor> compositor;
+    IntSize size;
+    nsTArray<nsRefPtr<layers::CompositingRenderTarget>> renderTargets;
+    int32_t currentRenderTarget;
+
+    virtual already_AddRefed<layers::CompositingRenderTarget> GetNextRenderTarget() = 0;
+  protected:
+    virtual ~RenderTargetSet();
+  };
+
+  virtual already_AddRefed<RenderTargetSet> CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize) = 0;
+  virtual void DestroyRenderTargetSet(RenderTargetSet *aRTSet) = 0;
+  virtual void SubmitFrame(RenderTargetSet *aRTSet) = 0;
+protected:
+  VRHMDRenderingSupport() { }
+};
+
 class VRHMDInfo {
 public:
   enum Eye {
     Eye_Left,
     Eye_Right,
     NumEyes
   };
 
@@ -165,16 +196,21 @@ public:
 
   virtual uint32_t GetSupportedSensorStateBits() { return mSupportedSensorBits; }
   virtual bool StartSensorTracking() = 0;
   virtual VRHMDSensorState GetSensorState(double timeOffset = 0.0) = 0;
   virtual void StopSensorTracking() = 0;
 
   virtual void ZeroSensor() = 0;
 
+
+  // Returns non-null if this HMD's runtime takes over distortion and
+  // presentation, i.e. rendering is offloaded to it.
+  virtual VRHMDRenderingSupport *GetRenderingSupport() { return nullptr; }
+
+  // Distortion-mesh path; we should eventually implement RenderingSupport
+  // for this as well.
   virtual void FillDistortionConstants(uint32_t whichEye,
                                        const IntSize& textureSize, // the full size of the texture
                                        const IntRect& eyeViewport, // the viewport within the texture for the current eye
                                        const Size& destViewport,   // the size of the destination viewport
                                        const Rect& destRect,       // the rectangle within the dest viewport that this should be rendered
                                        VRDistortionConstants& values) = 0;
 
   virtual const VRDistortionMesh& GetDistortionMesh(uint32_t whichEye) const { return mDistortionMesh[whichEye]; }
@@ -205,16 +241,17 @@ protected:
 };
 
 class VRHMDManager {
 public:
   static void ManagerInit();
   static void ManagerDestroy();
   static void GetAllHMDs(nsTArray<nsRefPtr<VRHMDInfo>>& aHMDResult);
   static uint32_t AllocateDeviceIndex();
+  static already_AddRefed<nsIScreen> MakeFakeScreen(int32_t x, int32_t y, uint32_t width, uint32_t height);
 
 protected:
   typedef nsTArray<nsRefPtr<VRHMDManager>> VRHMDManagerArray;
   static VRHMDManagerArray *sManagers;
   static Atomic<uint32_t> sDeviceBase;
 
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRHMDManager)
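
A minimal usage sketch for the new VRHMDRenderingSupport interface, mirroring what ContainerRenderVR does earlier in this patch (illustrative only; the function name and the externally held RenderTargetSet reference are assumptions, and it additionally needs the compositor headers):

// Hypothetical caller; assumes the mozilla::gfx namespace plus
// layers/Compositor.h and CompositingRenderTarget.
static void RenderOneVRFrame(layers::Compositor* aCompositor,
                             VRHMDInfo* aHMD,
                             nsRefPtr<VRHMDRenderingSupport::RenderTargetSet>& aRTSet,
                             const IntSize& aSize)
{
  VRHMDRenderingSupport* vr = aHMD->GetRenderingSupport();
  if (!vr) {
    return;  // no offloaded rendering; fall back to the distortion-mesh path
  }

  // (Re)create the swap-chain-backed targets if the size changed.
  if (!aRTSet || aRTSet->size != aSize) {
    aRTSet = vr->CreateRenderTargetSet(aCompositor, aSize);
    if (!aRTSet) {
      return;
    }
  }

  nsRefPtr<layers::CompositingRenderTarget> target = aRTSet->GetNextRenderTarget();
  if (!target) {
    return;
  }

  aCompositor->SetRenderTarget(target);
  // ... draw both eyes side by side into |target| here ...
  vr->SubmitFrame(aRTSet);
}
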
--- a/gfx/vr/gfxVRCardboard.cpp
+++ b/gfx/vr/gfxVRCardboard.cpp
@@ -21,20 +21,16 @@
 
 #ifdef ANDROID
 #include <android/log.h>
 #define LOG(args...)  __android_log_print(ANDROID_LOG_INFO, "GeckoVR" , ## args)
 #else
 #define LOG(...) do { } while(0)
 #endif
 
-#ifndef M_PI
-# define M_PI 3.14159265358979323846
-#endif
-
 // 1/sqrt(2) (aka sqrt(2)/2)
 #ifndef M_SQRT1_2
 # define M_SQRT1_2	0.70710678118654752440
 #endif
 
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
 using namespace mozilla::gfx::impl;
@@ -94,16 +90,28 @@ HMDInfoCardboard::HMDInfoCardboard()
 
   mRecommendedEyeFOV[Eye_Left] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);
   mRecommendedEyeFOV[Eye_Right] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);
 
   mMaximumEyeFOV[Eye_Left] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);
   mMaximumEyeFOV[Eye_Right] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);
 
   SetFOV(mRecommendedEyeFOV[Eye_Left], mRecommendedEyeFOV[Eye_Right], 0.01, 10000.0);
+
+#if 1
+  int32_t xcoord = 0;
+  if (getenv("FAKE_CARDBOARD_SCREEN")) {
+      const char *env = getenv("FAKE_CARDBOARD_SCREEN");
+      nsresult err;
+      xcoord = nsCString(env).ToInteger(&err);
+      if (err != NS_OK) xcoord = 0;
+  }
+  mScreen = VRHMDManager::MakeFakeScreen(xcoord, 0, 1920, 1080);
+#endif
+
 }
 
 bool
 HMDInfoCardboard::StartSensorTracking()
 {
   LOG("HMDInfoCardboard::StartSensorTracking %d\n", mStartCount);
   if (mStartCount == 0) {
     // it's never been started before; initialize observers and
@@ -124,19 +132,19 @@ HMDInfoCardboard::StartSensorTracking()
 }
 
 void
 HMDInfoCardboard::Notify(const mozilla::hal::ScreenConfiguration& config)
 {
   mOrient = config.orientation();
 
   if (mOrient == eScreenOrientation_LandscapePrimary) {
-    mScreenTransform = Quaternion(0.f, 0.f, M_SQRT1_2, M_SQRT1_2);
+    mScreenTransform = Quaternion(0.f, 0.f, (float) M_SQRT1_2, (float) M_SQRT1_2);
   } else if (mOrient == eScreenOrientation_LandscapeSecondary) {
-    mScreenTransform = Quaternion(0.f, 0.f, -M_SQRT1_2, M_SQRT1_2);
+    mScreenTransform = Quaternion(0.f, 0.f, (float) -M_SQRT1_2, (float) M_SQRT1_2);
   } else if (mOrient == eScreenOrientation_PortraitPrimary) {
     mScreenTransform = Quaternion();
   } else if (mOrient == eScreenOrientation_PortraitSecondary) {
     mScreenTransform = Quaternion(0.f, 0.f, 1.f, 0.f);
   }
 }
 
 void
@@ -210,59 +218,27 @@ HMDInfoCardboard::StopSensorTracking()
 
 void
 HMDInfoCardboard::ZeroSensor()
 {
   mSensorZeroInverse = mSavedLastSensor;
   mSensorZeroInverse.Invert();
 }
 
-static Matrix4x4
-ConstructProjectionMatrix(const VRFieldOfView& fov, bool rightHanded, double zNear, double zFar)
-{
-  float upTan = tan(fov.upDegrees * M_PI / 180.0);
-  float downTan = tan(fov.downDegrees * M_PI / 180.0);
-  float leftTan = tan(fov.leftDegrees * M_PI / 180.0);
-  float rightTan = tan(fov.rightDegrees * M_PI / 180.0);
-
-  float handednessScale = rightHanded ? -1.0 : 1.0;
-
-  float pxscale = 2.0f / (leftTan + rightTan);
-  float pxoffset = (leftTan - rightTan) * pxscale * 0.5;
-  float pyscale = 2.0f / (upTan + downTan);
-  float pyoffset = (upTan - downTan) * pyscale * 0.5;
-
-  Matrix4x4 mobj;
-  float *m = &mobj._11;
-
-  m[0*4+0] = pxscale;
-  m[0*4+2] = pxoffset * handednessScale;
-
-  m[1*4+1] = pyscale;
-  m[1*4+2] = -pyoffset * handednessScale;
-
-  m[2*4+2] = zFar / (zNear - zFar) * -handednessScale;
-  m[2*4+3] = (zFar * zNear) / (zNear - zFar);
-
-  m[3*4+2] = handednessScale;
-
-  return mobj;
-}
-
 bool
 HMDInfoCardboard::SetFOV(const VRFieldOfView& aFOVLeft,
                          const VRFieldOfView& aFOVRight,
                          double zNear, double zFar)
 {
   const float standardIPD = 0.064f;
 
   for (uint32_t eye = 0; eye < NumEyes; eye++) {
     mEyeFOV[eye] = eye == Eye_Left ? aFOVLeft : aFOVRight;
     mEyeTranslation[eye] = Point3D(standardIPD * (eye == Eye_Left ? -1.0 : 1.0), 0.0, 0.0);
-    mEyeProjectionMatrix[eye] = ConstructProjectionMatrix(mEyeFOV[eye], true, zNear, zFar);
+    mEyeProjectionMatrix[eye] = mEyeFOV[eye].ConstructProjectionMatrix(zNear, zFar, true);
 
     mDistortionMesh[eye].mVertices.SetLength(4);
     mDistortionMesh[eye].mIndices.SetLength(6);
 
     HMDInfoCardboard::DistortionVertex *destv = reinterpret_cast<HMDInfoCardboard::DistortionVertex*>(mDistortionMesh[eye].mVertices.Elements());
     float xoffs = eye == Eye_Left ? 0.0f : 1.0f;
     float txoffs = eye == Eye_Left ? 0.0f : 0.5f;
     destv[0].pos[0] = -1.0 + xoffs;
--- a/gfx/vr/gfxVROculus.cpp
+++ b/gfx/vr/gfxVROculus.cpp
@@ -7,16 +7,22 @@
 
 #include "prlink.h"
 #include "prmem.h"
 #include "prenv.h"
 #include "gfxPrefs.h"
 #include "nsString.h"
 #include "mozilla/Preferences.h"
 
+#include "mozilla/gfx/Quaternion.h"
+
+#ifdef XP_WIN
+#include "../layers/d3d11/CompositorD3D11.h"
+#endif
+
 #include "gfxVROculus.h"
 
 #include "nsServiceManagerUtils.h"
 #include "nsIScreenManager.h"
 
 #ifndef M_PI
 # define M_PI 3.14159265358979323846
 #endif
@@ -24,52 +30,46 @@
 using namespace mozilla::gfx;
 using namespace mozilla::gfx::impl;
 
 namespace {
 
 #ifdef OVR_CAPI_LIMITED_MOZILLA
 static pfn_ovr_Initialize ovr_Initialize = nullptr;
 static pfn_ovr_Shutdown ovr_Shutdown = nullptr;
+static pfn_ovr_GetTimeInSeconds ovr_GetTimeInSeconds = nullptr;
+
 static pfn_ovrHmd_Detect ovrHmd_Detect = nullptr;
 static pfn_ovrHmd_Create ovrHmd_Create = nullptr;
+static pfn_ovrHmd_CreateDebug ovrHmd_CreateDebug = nullptr;
 static pfn_ovrHmd_Destroy ovrHmd_Destroy = nullptr;
-static pfn_ovrHmd_CreateDebug ovrHmd_CreateDebug = nullptr;
-static pfn_ovrHmd_GetLastError ovrHmd_GetLastError = nullptr;
-static pfn_ovrHmd_AttachToWindow ovrHmd_AttachToWindow = nullptr;
-static pfn_ovrHmd_GetEnabledCaps ovrHmd_GetEnabledCaps = nullptr;
-static pfn_ovrHmd_SetEnabledCaps ovrHmd_SetEnabledCaps = nullptr;
+
 static pfn_ovrHmd_ConfigureTracking ovrHmd_ConfigureTracking = nullptr;
 static pfn_ovrHmd_RecenterPose ovrHmd_RecenterPose = nullptr;
 static pfn_ovrHmd_GetTrackingState ovrHmd_GetTrackingState = nullptr;
 static pfn_ovrHmd_GetFovTextureSize ovrHmd_GetFovTextureSize = nullptr;
 static pfn_ovrHmd_GetRenderDesc ovrHmd_GetRenderDesc = nullptr;
-static pfn_ovrHmd_CreateDistortionMesh ovrHmd_CreateDistortionMesh = nullptr;
-static pfn_ovrHmd_DestroyDistortionMesh ovrHmd_DestroyDistortionMesh = nullptr;
-static pfn_ovrHmd_GetRenderScaleAndOffset ovrHmd_GetRenderScaleAndOffset = nullptr;
-static pfn_ovrHmd_GetFrameTiming ovrHmd_GetFrameTiming = nullptr;
-static pfn_ovrHmd_BeginFrameTiming ovrHmd_BeginFrameTiming = nullptr;
-static pfn_ovrHmd_EndFrameTiming ovrHmd_EndFrameTiming = nullptr;
-static pfn_ovrHmd_ResetFrameTiming ovrHmd_ResetFrameTiming = nullptr;
-static pfn_ovrHmd_GetEyePoses ovrHmd_GetEyePoses = nullptr;
-static pfn_ovrHmd_GetHmdPosePerEye ovrHmd_GetHmdPosePerEye = nullptr;
-static pfn_ovrHmd_GetEyeTimewarpMatrices ovrHmd_GetEyeTimewarpMatrices = nullptr;
-static pfn_ovrMatrix4f_Projection ovrMatrix4f_Projection = nullptr;
-static pfn_ovrMatrix4f_OrthoSubProjection ovrMatrix4f_OrthoSubProjection = nullptr;
-static pfn_ovr_GetTimeInSeconds ovr_GetTimeInSeconds = nullptr;
+
+static pfn_ovrHmd_DestroySwapTextureSet ovrHmd_DestroySwapTextureSet = nullptr;
+static pfn_ovrHmd_SubmitFrame ovrHmd_SubmitFrame = nullptr;
+
+#ifdef XP_WIN
+static pfn_ovrHmd_CreateSwapTextureSetD3D11 ovrHmd_CreateSwapTextureSetD3D11 = nullptr;
+#endif
+static pfn_ovrHmd_CreateSwapTextureSetGL ovrHmd_CreateSwapTextureSetGL = nullptr;
 
 #ifdef HAVE_64BIT_BUILD
 #define BUILD_BITS 64
 #else
 #define BUILD_BITS 32
 #endif
 
-#define LIBOVR_PRODUCT_VERSION 0
-#define LIBOVR_MAJOR_VERSION   5
-#define LIBOVR_MINOR_VERSION   0
+#define OVR_PRODUCT_VERSION 0
+#define OVR_MAJOR_VERSION   6
+#define OVR_MINOR_VERSION   0
 
 static bool
 InitializeOculusCAPI()
 {
   static PRLibrary *ovrlib = nullptr;
 
   if (!ovrlib) {
     nsTArray<nsCString> libSearchPaths;
@@ -85,36 +85,36 @@ InitializeOculusCAPI()
 #if defined(_WIN32)
     static const int pathLen = 260;
     searchPath.SetCapacity(pathLen);
     int realLen = ::GetSystemDirectoryA(searchPath.BeginWriting(), pathLen);
     if (realLen != 0 && realLen < pathLen) {
       searchPath.SetLength(realLen);
       libSearchPaths.AppendElement(searchPath);
     }
-    libName.AppendPrintf("LibOVRRT%d_%d_%d.dll", BUILD_BITS, LIBOVR_PRODUCT_VERSION, LIBOVR_MAJOR_VERSION);
+    libName.AppendPrintf("LibOVRRT%d_%d_%d.dll", BUILD_BITS, OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION);
 #elif defined(__APPLE__)
     searchPath.Truncate();
-    searchPath.AppendPrintf("/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", LIBOVR_PRODUCT_VERSION, LIBOVR_MAJOR_VERSION);
+    searchPath.AppendPrintf("/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION);
     libSearchPaths.AppendElement(searchPath);
 
     if (PR_GetEnv("HOME")) {
       searchPath.Truncate();
-      searchPath.AppendPrintf("%s/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", PR_GetEnv("HOME"), LIBOVR_PRODUCT_VERSION, LIBOVR_MAJOR_VERSION);
+      searchPath.AppendPrintf("%s/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", PR_GetEnv("HOME"), OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION);
       libSearchPaths.AppendElement(searchPath);
     }
     // The following will match the va_list overload of AppendPrintf if the product version is 0
     // That's bad times.
-    //libName.AppendPrintf("LibOVRRT_%d", LIBOVR_PRODUCT_VERSION);
+    //libName.AppendPrintf("LibOVRRT_%d", OVR_PRODUCT_VERSION);
     libName.Append("LibOVRRT_");
-    libName.AppendInt(LIBOVR_PRODUCT_VERSION);
+    libName.AppendInt(OVR_PRODUCT_VERSION);
 #else
     libSearchPaths.AppendElement(nsCString("/usr/local/lib"));
     libSearchPaths.AppendElement(nsCString("/usr/lib"));
-    libName.AppendPrintf("libOVRRT%d_%d.so.%d", BUILD_BITS, LIBOVR_PRODUCT_VERSION, LIBOVR_MAJOR_VERSION);
+    libName.AppendPrintf("libOVRRT%d_%d.so.%d", BUILD_BITS, OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION);
 #endif
 
     // If the pref is present, we override libName
     nsAdoptingCString prefLibPath = mozilla::Preferences::GetCString("dom.vr.ovr_lib_path");
     if (prefLibPath && prefLibPath.get()) {
       libSearchPaths.InsertElementsAt(0, 1, prefLibPath);
     }
 
@@ -162,61 +162,75 @@ InitializeOculusCAPI()
 
 #define REQUIRE_FUNCTION(_x) do { \
     *(void **)&_x = (void *) PR_FindSymbol(ovrlib, #_x);                \
     if (!_x) { printf_stderr(#_x " symbol missing\n"); goto fail; }       \
   } while (0)
 
   REQUIRE_FUNCTION(ovr_Initialize);
   REQUIRE_FUNCTION(ovr_Shutdown);
+  REQUIRE_FUNCTION(ovr_GetTimeInSeconds);
+  
   REQUIRE_FUNCTION(ovrHmd_Detect);
   REQUIRE_FUNCTION(ovrHmd_Create);
+  REQUIRE_FUNCTION(ovrHmd_CreateDebug);
   REQUIRE_FUNCTION(ovrHmd_Destroy);
-  REQUIRE_FUNCTION(ovrHmd_CreateDebug);
-  REQUIRE_FUNCTION(ovrHmd_GetLastError);
-  REQUIRE_FUNCTION(ovrHmd_AttachToWindow);
-  REQUIRE_FUNCTION(ovrHmd_GetEnabledCaps);
-  REQUIRE_FUNCTION(ovrHmd_SetEnabledCaps);
+  
   REQUIRE_FUNCTION(ovrHmd_ConfigureTracking);
   REQUIRE_FUNCTION(ovrHmd_RecenterPose);
   REQUIRE_FUNCTION(ovrHmd_GetTrackingState);
-
   REQUIRE_FUNCTION(ovrHmd_GetFovTextureSize);
   REQUIRE_FUNCTION(ovrHmd_GetRenderDesc);
-  REQUIRE_FUNCTION(ovrHmd_CreateDistortionMesh);
-  REQUIRE_FUNCTION(ovrHmd_DestroyDistortionMesh);
-  REQUIRE_FUNCTION(ovrHmd_GetRenderScaleAndOffset);
-  REQUIRE_FUNCTION(ovrHmd_GetFrameTiming);
-  REQUIRE_FUNCTION(ovrHmd_BeginFrameTiming);
-  REQUIRE_FUNCTION(ovrHmd_EndFrameTiming);
-  REQUIRE_FUNCTION(ovrHmd_ResetFrameTiming);
-  REQUIRE_FUNCTION(ovrHmd_GetEyePoses);
-  REQUIRE_FUNCTION(ovrHmd_GetHmdPosePerEye);
-  REQUIRE_FUNCTION(ovrHmd_GetEyeTimewarpMatrices);
-  REQUIRE_FUNCTION(ovrMatrix4f_Projection);
-  REQUIRE_FUNCTION(ovrMatrix4f_OrthoSubProjection);
-  REQUIRE_FUNCTION(ovr_GetTimeInSeconds);
+
+  REQUIRE_FUNCTION(ovrHmd_DestroySwapTextureSet);
+  REQUIRE_FUNCTION(ovrHmd_SubmitFrame);
+#ifdef XP_WIN
+  REQUIRE_FUNCTION(ovrHmd_CreateSwapTextureSetD3D11);
+#endif
+  REQUIRE_FUNCTION(ovrHmd_CreateSwapTextureSetGL);
 
 #undef REQUIRE_FUNCTION
 
   return true;
 
  fail:
   ovr_Initialize = nullptr;
   return false;
 }
 
 #else
+#include <OVR_Version.h>
 // we're statically linked; it's available
 static bool InitializeOculusCAPI()
 {
   return true;
 }
+
 #endif
 
+static void
+do_CalcEyePoses(ovrPosef headPose,
+                const ovrVector3f hmdToEyeViewOffset[2],
+                ovrPosef outEyePoses[2])
+{
+  if (!hmdToEyeViewOffset || !outEyePoses)
+    return;
+
+  for (uint32_t i = 0; i < 2; ++i) {
+    gfx::Quaternion o(headPose.Orientation.x, headPose.Orientation.y, headPose.Orientation.z, headPose.Orientation.w);
+    Point3D vo(hmdToEyeViewOffset[i].x, hmdToEyeViewOffset[i].y, hmdToEyeViewOffset[i].z);
+    Point3D p = o.RotatePoint(vo);
+
+    outEyePoses[i].Orientation = headPose.Orientation;
+    outEyePoses[i].Position.x = p.x + headPose.Position.x;
+    outEyePoses[i].Position.y = p.y + headPose.Position.y;
+    outEyePoses[i].Position.z = p.z + headPose.Position.z;
+  }
+}
+
 ovrFovPort
 ToFovPort(const VRFieldOfView& aFOV)
 {
   ovrFovPort fovPort;
   fovPort.LeftTan = tan(aFOV.leftDegrees * M_PI / 180.0);
   fovPort.RightTan = tan(aFOV.rightDegrees * M_PI / 180.0);
   fovPort.UpTan = tan(aFOV.upDegrees * M_PI / 180.0);
   fovPort.DownTan = tan(aFOV.downDegrees * M_PI / 180.0);
@@ -257,22 +271,32 @@ HMDInfoOculus::HMDInfoOculus(ovrHmd aHMD
   mRecommendedEyeFOV[Eye_Left] = FromFovPort(mHMD->DefaultEyeFov[ovrEye_Left]);
   mRecommendedEyeFOV[Eye_Right] = FromFovPort(mHMD->DefaultEyeFov[ovrEye_Right]);
 
   mMaximumEyeFOV[Eye_Left] = FromFovPort(mHMD->MaxEyeFov[ovrEye_Left]);
   mMaximumEyeFOV[Eye_Right] = FromFovPort(mHMD->MaxEyeFov[ovrEye_Right]);
 
   SetFOV(mRecommendedEyeFOV[Eye_Left], mRecommendedEyeFOV[Eye_Right], 0.01, 10000.0);
 
-  nsCOMPtr<nsIScreenManager> screenmgr = do_GetService("@mozilla.org/gfx/screenmanager;1");
-  if (screenmgr) {
-    screenmgr->ScreenForRect(mHMD->WindowsPos.x, mHMD->WindowsPos.y,
-                             mHMD->Resolution.w, mHMD->Resolution.h,
-                             getter_AddRefs(mScreen));
+#if 1
+  int32_t xcoord = 0;
+  if (getenv("FAKE_OCULUS_SCREEN")) {
+      const char *env = getenv("FAKE_OCULUS_SCREEN");
+      nsresult err;
+      xcoord = nsCString(env).ToInteger(&err);
+      if (err != NS_OK) xcoord = 0;
   }
+  uint32_t w = mHMD->Resolution.w;
+  uint32_t h = mHMD->Resolution.h;
+  mScreen = VRHMDManager::MakeFakeScreen(xcoord, 0, std::max(w, h), std::min(w, h));
+
+#ifdef DEBUG
+  printf_stderr("OCULUS SCREEN: %d %d %d %d\n", xcoord, 0, std::max(w, h), std::min(w, h));
+#endif
+#endif
 }
 
 void
 HMDInfoOculus::Destroy()
 {
   if (mHMD) {
     ovrHmd_Destroy(mHMD);
     mHMD = nullptr;
@@ -281,117 +305,52 @@ HMDInfoOculus::Destroy()
 
 bool
 HMDInfoOculus::SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
                       double zNear, double zFar)
 {
   float pixelsPerDisplayPixel = 1.0;
   ovrSizei texSize[2];
 
-  uint32_t caps = ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette; // XXX TODO add TimeWarp
-
   // get eye parameters and create the mesh
   for (uint32_t eye = 0; eye < NumEyes; eye++) {
     mEyeFOV[eye] = eye == 0 ? aFOVLeft : aFOVRight;
     mFOVPort[eye] = ToFovPort(mEyeFOV[eye]);
 
     ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(mHMD, (ovrEyeType) eye, mFOVPort[eye]);
 
-    // these values are negated so that content can add the adjustment to its camera position,
-    // instead of subtracting
-    mEyeTranslation[eye] = Point3D(-renderDesc.HmdToEyeViewOffset.x, -renderDesc.HmdToEyeViewOffset.y, -renderDesc.HmdToEyeViewOffset.z);
+    // As of Oculus 0.6.0, the HmdToEyeViewOffset values are correct and don't need to be negated.
+    mEyeTranslation[eye] = Point3D(renderDesc.HmdToEyeViewOffset.x, renderDesc.HmdToEyeViewOffset.y, renderDesc.HmdToEyeViewOffset.z);
 
     // note that we are using a right-handed coordinate system here, to match CSS
-    ovrMatrix4f projMatrix = ovrMatrix4f_Projection(mFOVPort[eye], zNear, zFar, true);
-
-    // XXX this is gross, we really need better methods on Matrix4x4
-    memcpy(&mEyeProjectionMatrix[eye], projMatrix.M, sizeof(ovrMatrix4f));
-    mEyeProjectionMatrix[eye].Transpose();
+    mEyeProjectionMatrix[eye] = mEyeFOV[eye].ConstructProjectionMatrix(zNear, zFar, true);
 
     texSize[eye] = ovrHmd_GetFovTextureSize(mHMD, (ovrEyeType) eye, mFOVPort[eye], pixelsPerDisplayPixel);
-
-    ovrDistortionMesh mesh;
-    bool ok = ovrHmd_CreateDistortionMesh(mHMD, (ovrEyeType) eye, mFOVPort[eye], caps, &mesh);
-    if (!ok)
-      return false;
-
-    mDistortionMesh[eye].mVertices.SetLength(mesh.VertexCount);
-    mDistortionMesh[eye].mIndices.SetLength(mesh.IndexCount);
-
-    ovrDistortionVertex *srcv = mesh.pVertexData;
-    HMDInfoOculus::DistortionVertex *destv = reinterpret_cast<HMDInfoOculus::DistortionVertex*>(mDistortionMesh[eye].mVertices.Elements());
-    memset(destv, 0, mesh.VertexCount * sizeof(VRDistortionVertex));
-    for (uint32_t i = 0; i < mesh.VertexCount; ++i) {
-      destv[i].pos[0] = srcv[i].ScreenPosNDC.x;
-      destv[i].pos[1] = srcv[i].ScreenPosNDC.y;
-
-      destv[i].texR[0] = srcv[i].TanEyeAnglesR.x;
-      destv[i].texR[1] = srcv[i].TanEyeAnglesR.y;
-      destv[i].texG[0] = srcv[i].TanEyeAnglesG.x;
-      destv[i].texG[1] = srcv[i].TanEyeAnglesG.y;
-      destv[i].texB[0] = srcv[i].TanEyeAnglesB.x;
-      destv[i].texB[1] = srcv[i].TanEyeAnglesB.y;
-
-      destv[i].genericAttribs[0] = srcv[i].VignetteFactor;
-      destv[i].genericAttribs[1] = srcv[i].TimeWarpFactor;
-    }
-
-    memcpy(mDistortionMesh[eye].mIndices.Elements(), mesh.pIndexData, mesh.IndexCount * sizeof(uint16_t));
-    ovrHmd_DestroyDistortionMesh(&mesh);
   }
 
   // take the max of both for eye resolution
   mEyeResolution.width = std::max(texSize[Eye_Left].w, texSize[Eye_Right].w);
   mEyeResolution.height = std::max(texSize[Eye_Left].h, texSize[Eye_Right].h);
 
   mConfiguration.hmdType = mType;
   mConfiguration.value = 0;
   mConfiguration.fov[0] = aFOVLeft;
   mConfiguration.fov[1] = aFOVRight;
 
   return true;
-  //* need to call this during rendering each frame I think? */
-  //ovrHmd_GetRenderScaleAndOffset(fovPort, texSize, renderViewport, uvScaleOffsetOut);
 }
 
 void
 HMDInfoOculus::FillDistortionConstants(uint32_t whichEye,
                                        const IntSize& textureSize,
                                        const IntRect& eyeViewport,
                                        const Size& destViewport,
                                        const Rect& destRect,
                                        VRDistortionConstants& values)
 {
-  ovrSizei texSize = { textureSize.width, textureSize.height };
-  ovrRecti eyePort = { { eyeViewport.x, eyeViewport.y }, { eyeViewport.width, eyeViewport.height } };
-  ovrVector2f scaleOut[2];
-
-  ovrHmd_GetRenderScaleAndOffset(mFOVPort[whichEye], texSize, eyePort, scaleOut);
-
-  values.eyeToSourceScaleAndOffset[0] = scaleOut[1].x;
-  values.eyeToSourceScaleAndOffset[1] = scaleOut[1].y;
-  values.eyeToSourceScaleAndOffset[2] = scaleOut[0].x;
-  values.eyeToSourceScaleAndOffset[3] = scaleOut[0].y;
-
-  // These values are in clip space [-1..1] range, but we're providing
-  // scaling in the 0..2 space for sanity.
-
-  // this is the destRect in clip space
-  float x0 = destRect.x / destViewport.width * 2.0 - 1.0;
-  float x1 = (destRect.x + destRect.width) / destViewport.width * 2.0 - 1.0;
-
-  float y0 = destRect.y / destViewport.height * 2.0 - 1.0;
-  float y1 = (destRect.y + destRect.height) / destViewport.height * 2.0 - 1.0;
-
-  // offset
-  values.destinationScaleAndOffset[0] = (x0+x1) / 2.0;
-  values.destinationScaleAndOffset[1] = (y0+y1) / 2.0;
-  // scale
-  values.destinationScaleAndOffset[2] = destRect.width / destViewport.width;
-  values.destinationScaleAndOffset[3] = destRect.height / destViewport.height;
 }
 
 bool
 HMDInfoOculus::StartSensorTracking()
 {
   if (mStartCount == 0) {
     bool ok = ovrHmd_ConfigureTracking(mHMD, ovrTrackingCap_Orientation | ovrTrackingCap_Position, 0);
     if (!ok)
@@ -457,72 +416,226 @@ HMDInfoOculus::GetSensorState(double tim
     result.linearVelocity[1] = pose.LinearVelocity.y;
     result.linearVelocity[2] = pose.LinearVelocity.z;
 
     result.linearAcceleration[0] = pose.LinearAcceleration.x;
     result.linearAcceleration[1] = pose.LinearAcceleration.y;
     result.linearAcceleration[2] = pose.LinearAcceleration.z;
   }
 
+  mLastTrackingState = state;
+  
   return result;
 }
 
+struct RenderTargetSetOculus : public VRHMDRenderingSupport::RenderTargetSet
+{
+  RenderTargetSetOculus(const IntSize& aSize,
+                        HMDInfoOculus *aHMD,
+                        ovrSwapTextureSet *aTS)
+    : hmd(aHMD)
+  {
+    textureSet = aTS;
+    size = aSize;
+  }
+  
+  already_AddRefed<layers::CompositingRenderTarget> GetNextRenderTarget() override {
+    currentRenderTarget = (currentRenderTarget + 1) % renderTargets.Length();
+    textureSet->CurrentIndex = currentRenderTarget;
+    renderTargets[currentRenderTarget]->ClearOnBind();
+    nsRefPtr<layers::CompositingRenderTarget> rt = renderTargets[currentRenderTarget];
+    return rt.forget();
+  }
+
+  void Destroy() {
+    if (!hmd)
+      return;
+    
+    if (hmd->GetOculusHMD()) {
+      // If the ovrHmd was already destroyed, so were all associated
+      // texture sets
+      ovrHmd_DestroySwapTextureSet(hmd->GetOculusHMD(), textureSet);
+    }
+    hmd = nullptr;
+    textureSet = nullptr;
+  }
+  
+  ~RenderTargetSetOculus() {
+    Destroy();
+  }
+
+  nsRefPtr<HMDInfoOculus> hmd;
+  ovrSwapTextureSet *textureSet;
+};
+
+#ifdef XP_WIN
+class BasicTextureSourceD3D11 : public layers::TextureSourceD3D11
+{
+public:
+  BasicTextureSourceD3D11(ID3D11Texture2D *aTexture, const IntSize& aSize) {
+    mTexture = aTexture;
+    mSize = aSize;
+  }
+};
+
+struct RenderTargetSetD3D11 : public RenderTargetSetOculus
+{
+  RenderTargetSetD3D11(layers::CompositorD3D11 *aCompositor,
+                       const IntSize& aSize,
+                       HMDInfoOculus *aHMD,
+                       ovrSwapTextureSet *aTS)
+    : RenderTargetSetOculus(aSize, aHMD, aTS)
+  {
+    compositor = aCompositor;
+    
+    renderTargets.SetLength(aTS->TextureCount);
+    
+    currentRenderTarget = aTS->CurrentIndex;
+
+    for (int i = 0; i < aTS->TextureCount; ++i) {
+      ovrD3D11Texture *tex11;
+      nsRefPtr<layers::CompositingRenderTargetD3D11> rt;
+      
+      tex11 = (ovrD3D11Texture*)&aTS->Textures[i];
+      rt = new layers::CompositingRenderTargetD3D11(tex11->D3D11.pTexture, IntPoint(0, 0));
+      rt->SetSize(size);
+      renderTargets[i] = rt;
+    }
+  }
+};
+#endif
+
+already_AddRefed<VRHMDRenderingSupport::RenderTargetSet>
+HMDInfoOculus::CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize)
+{
+#ifdef XP_WIN
+  if (aCompositor->GetBackendType() == layers::LayersBackend::LAYERS_D3D11)
+  {
+    layers::CompositorD3D11 *comp11 = static_cast<layers::CompositorD3D11*>(aCompositor);
+
+    CD3D11_TEXTURE2D_DESC desc(DXGI_FORMAT_B8G8R8A8_UNORM, aSize.width, aSize.height, 1, 1,
+                               D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET);
+    ovrSwapTextureSet *ts = nullptr;
+    
+    ovrResult orv = ovrHmd_CreateSwapTextureSetD3D11(mHMD, comp11->GetDevice(), &desc, &ts);
+    if (orv != ovrSuccess) {
+      return nullptr;
+    }
+
+    nsRefPtr<RenderTargetSetD3D11> rts = new RenderTargetSetD3D11(comp11, aSize, this, ts);
+    return rts.forget();
+  }
+#endif
+
+  if (aCompositor->GetBackendType() == layers::LayersBackend::LAYERS_OPENGL) {
+  }
+
+  return nullptr;
+}
+
+void
+HMDInfoOculus::DestroyRenderTargetSet(RenderTargetSet *aRTSet)
+{
+  RenderTargetSetOculus *rts = static_cast<RenderTargetSetOculus*>(aRTSet);
+  rts->Destroy();
+}
+
+void
+HMDInfoOculus::SubmitFrame(RenderTargetSet *aRTSet)
+{
+  RenderTargetSetOculus *rts = static_cast<RenderTargetSetOculus*>(aRTSet);
+  MOZ_ASSERT(rts->hmd != nullptr);
+  MOZ_ASSERT(rts->textureSet != nullptr);
+
+  ovrLayerEyeFov layer;
+  layer.Header.Type = ovrLayerType_EyeFov;
+  layer.Header.Flags = 0;
+  layer.ColorTexture[0] = rts->textureSet;
+  layer.ColorTexture[1] = nullptr;
+  layer.Fov[0] = mFOVPort[0];
+  layer.Fov[1] = mFOVPort[1];
+  layer.Viewport[0].Pos.x = 0;
+  layer.Viewport[0].Pos.y = 0;
+  layer.Viewport[0].Size.w = rts->size.width / 2;
+  layer.Viewport[0].Size.h = rts->size.height;
+  layer.Viewport[1].Pos.x = rts->size.width / 2;
+  layer.Viewport[1].Pos.y = 0;
+  layer.Viewport[1].Size.w = rts->size.width / 2;
+  layer.Viewport[1].Size.h = rts->size.height;
+
+  const Point3D& l = rts->hmd->mEyeTranslation[0];
+  const Point3D& r = rts->hmd->mEyeTranslation[1];
+  const ovrVector3f hmdToEyeViewOffset[2] = { { l.x, l.y, l.z },
+                                              { r.x, r.y, r.z } };
+  do_CalcEyePoses(rts->hmd->mLastTrackingState.HeadPose.ThePose, hmdToEyeViewOffset, layer.RenderPose);
+
+  ovrLayerHeader *layers = &layer.Header;
+  ovrResult orv = ovrHmd_SubmitFrame(mHMD, 0, nullptr, &layers, 1);
+  //printf_stderr("Submitted frame %d, result: %d\n", rts->textureSet->CurrentIndex, orv);
+  if (orv != ovrSuccess) {
+    // not visible? failed?
+  }
+}
+
 bool
 VRHMDManagerOculus::PlatformInit()
 {
   if (mOculusPlatformInitialized)
     return true;
 
   if (!gfxPrefs::VREnabled())
     return false;
 
   if (!InitializeOculusCAPI())
     return false;
 
   ovrInitParams params;
   params.Flags = ovrInit_RequestVersion;
-  params.RequestedMinorVersion = LIBOVR_MINOR_VERSION;
+  params.RequestedMinorVersion = OVR_MINOR_VERSION;
   params.LogCallback = nullptr;
   params.ConnectionTimeoutMS = 0;
 
-  bool ok = ovr_Initialize(&params);
+  ovrResult orv = ovr_Initialize(&params);
 
-  if (!ok)
+  if (orv != ovrSuccess)
     return false;
 
   mOculusPlatformInitialized = true;
   return true;
 }
 
 bool
 VRHMDManagerOculus::Init()
 {
   if (mOculusInitialized)
     return true;
 
   if (!PlatformInit())
     return false;
 
+  ovrResult orv;
   int count = ovrHmd_Detect();
-
+  
   for (int i = 0; i < count; ++i) {
-    ovrHmd hmd = ovrHmd_Create(i);
-    if (hmd) {
+    ovrHmd hmd;
+    orv = ovrHmd_Create(i, &hmd);
+    if (orv == ovrSuccess) {
       nsRefPtr<HMDInfoOculus> oc = new HMDInfoOculus(hmd);
       mOculusHMDs.AppendElement(oc);
     }
   }
 
   // VRAddTestDevices == 1: add test device only if no real devices present
   // VRAddTestDevices == 2: add test device always
   if ((count == 0 && gfxPrefs::VRAddTestDevices() == 1) ||
       (gfxPrefs::VRAddTestDevices() == 2))
   {
-    ovrHmd hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
-    if (hmd) {
+    ovrHmd hmd;
+    orv = ovrHmd_CreateDebug(ovrHmd_DK2, &hmd);
+    if (orv == ovrSuccess) {
       nsRefPtr<HMDInfoOculus> oc = new HMDInfoOculus(hmd);
       mOculusHMDs.AppendElement(oc);
     }
   }
 
   mOculusInitialized = true;
   return true;
 }
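
Boiled down, the 0.6.0-style frame loop HMDInfoOculus now implements looks like the sketch below, using only the dynamically bound entry points and types referenced above (illustrative; rendering and error handling are elided, and the helper name is invented):

// Advance the swap texture set, render into it, then hand it to the runtime.
static void SubmitOneFrame(ovrHmd hmd, ovrSwapTextureSet* ts,
                           const ovrFovPort fov[2], const ovrPosef pose[2],
                           int width, int height)
{
  ts->CurrentIndex = (ts->CurrentIndex + 1) % ts->TextureCount;
  // ... draw both eyes into ts->Textures[ts->CurrentIndex] here ...

  ovrLayerEyeFov layer;
  layer.Header.Type = ovrLayerType_EyeFov;
  layer.Header.Flags = 0;
  layer.ColorTexture[0] = ts;       // single shared, side-by-side texture set
  layer.ColorTexture[1] = nullptr;
  for (int eye = 0; eye < 2; ++eye) {
    layer.Fov[eye] = fov[eye];
    layer.RenderPose[eye] = pose[eye];
    layer.Viewport[eye].Pos.x = eye * (width / 2);
    layer.Viewport[eye].Pos.y = 0;
    layer.Viewport[eye].Size.w = width / 2;
    layer.Viewport[eye].Size.h = height;
  }

  ovrLayerHeader* layers = &layer.Header;
  ovrHmd_SubmitFrame(hmd, 0, nullptr, &layers, 1);
}
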
--- a/gfx/vr/gfxVROculus.h
+++ b/gfx/vr/gfxVROculus.h
@@ -10,41 +10,52 @@
 #include "nsIScreen.h"
 #include "nsCOMPtr.h"
 #include "nsRefPtr.h"
 
 #include "mozilla/gfx/2D.h"
 #include "mozilla/EnumeratedArray.h"
 
 #include "gfxVR.h"
+//#include <OVR_CAPI.h>
+//#include <OVR_CAPI_D3D.h>
 #include "ovr_capi_dynamic.h"
 
 namespace mozilla {
 namespace gfx {
 namespace impl {
 
-class HMDInfoOculus : public VRHMDInfo {
+class HMDInfoOculus : public VRHMDInfo, public VRHMDRenderingSupport {
 public:
   explicit HMDInfoOculus(ovrHmd aHMD);
 
   bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
               double zNear, double zFar) override;
 
   bool StartSensorTracking() override;
   VRHMDSensorState GetSensorState(double timeOffset) override;
   void StopSensorTracking() override;
   void ZeroSensor() override;
 
   void FillDistortionConstants(uint32_t whichEye,
                                const IntSize& textureSize, const IntRect& eyeViewport,
                                const Size& destViewport, const Rect& destRect,
                                VRDistortionConstants& values) override;
 
+  VRHMDRenderingSupport* GetRenderingSupport() override { return this; }
+  
   void Destroy();
 
+  /* VRHMDRenderingSupport */
+  already_AddRefed<RenderTargetSet> CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize) override;
+  void DestroyRenderTargetSet(RenderTargetSet *aRTSet) override;
+  void SubmitFrame(RenderTargetSet *aRTSet) override;
+
+  ovrHmd GetOculusHMD() const { return mHMD; }
+
 protected:
   // must match the size of VRDistortionVertex
   struct DistortionVertex {
     float pos[2];
     float texR[2];
     float texG[2];
     float texB[2];
     float genericAttribs[4];
@@ -53,16 +64,17 @@ protected:
   virtual ~HMDInfoOculus() {
       Destroy();
       MOZ_COUNT_DTOR_INHERITED(HMDInfoOculus, VRHMDInfo);
   }
 
   ovrHmd mHMD;
   ovrFovPort mFOVPort[2];
   uint32_t mStartCount;
+  ovrTrackingState mLastTrackingState;
 };
 
 } // namespace impl
 
 class VRHMDManagerOculus : public VRHMDManager
 {
 public:
   VRHMDManagerOculus()
--- a/gfx/vr/moz.build
+++ b/gfx/vr/moz.build
@@ -13,16 +13,26 @@ LOCAL_INCLUDES += [
 ]
 
 UNIFIED_SOURCES += [
     'gfxVR.cpp',
     'gfxVRCardboard.cpp',
     'gfxVROculus.cpp',
 ]
 
+# For building with the real SDK instead of our local hack
+#SOURCES += [
+#    'OVR_CAPI_Util.cpp',
+#    'OVR_CAPIShim.c',
+#    'OVR_StereoProjection.cpp',
+#]
+#
+#CXXFLAGS += ["-Ic:/proj/ovr/OculusSDK-0.6.0-beta/LibOVR/Include"]
+#CFLAGS += ["-Ic:/proj/ovr/OculusSDK-0.6.0-beta/LibOVR/Include"]
+
 CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS']
 CXXFLAGS += CONFIG['TK_CFLAGS']
 CFLAGS += CONFIG['MOZ_CAIRO_CFLAGS']
 CFLAGS += CONFIG['TK_CFLAGS']
 
 FAIL_ON_WARNINGS = not CONFIG['_MSC_VER']
 
 include('/ipc/chromium/chromium-config.mozbuild')
--- a/gfx/vr/ovr_capi_dynamic.h
+++ b/gfx/vr/ovr_capi_dynamic.h
@@ -6,26 +6,38 @@
 /* This file contains just the needed struct definitions for
  * interacting with the Oculus VR C API, without needing to #include
  * OVR_CAPI.h directly.  Note that it uses the same type names as the
  * CAPI, and cannot be #included at the same time as OVR_CAPI.h.  It
  * does not include the entire C API, just what's needed.
  */
 
 #ifdef OVR_CAPI_h
-#warning OVR_CAPI.h included before ovr_capi_dynamic.h, skpping this
+#ifdef _MSC_VER
+#pragma message("ovr_capi_dynamic.h: OVR_CAPI.h included before ovr_capi_dynamic.h, skipping this")
+#else
+#warning OVR_CAPI.h included before ovr_capi_dynamic.h, skipping this
+#endif
 #define mozilla_ovr_capi_dynamic_h_
 
 #else
 
 #ifndef mozilla_ovr_capi_dynamic_h_
 #define mozilla_ovr_capi_dynamic_h_
 
 #define OVR_CAPI_LIMITED_MOZILLA 1
 
+#ifdef HAVE_64BIT_BUILD
+#define OVR_PTR_SIZE 8
+#define OVR_ON64(x)     x
+#else
+#define OVR_PTR_SIZE 4
+#define OVR_ON64(x)     /**/
+#endif
+
 #if defined(_WIN32)
 #define OVR_PFN __cdecl
 #else
 #define OVR_PFN
 #endif
 
 #if !defined(OVR_ALIGNAS)
 #if defined(__GNUC__) && (((__GNUC__ * 100) + __GNUC_MINOR__) >= 408) && (defined(__GXX_EXPERIMENTAL_CXX0X__) || (__cplusplus >= 201103L))
@@ -46,16 +58,17 @@
 #error Need to define OVR_ALIGNAS
 #endif
 #endif
 
 #ifdef __cplusplus 
 extern "C" {
 #endif
 
+typedef int32_t ovrResult;
 typedef char ovrBool;
 typedef struct { int x, y; } ovrVector2i;
 typedef struct { int w, h; } ovrSizei;
 typedef struct { ovrVector2i Pos; ovrSizei Size; } ovrRecti;
 typedef struct { float x, y, z, w; } ovrQuatf;
 typedef struct { float x, y; } ovrVector2f;
 typedef struct { float x, y, z; } ovrVector3f;
 typedef struct { float M[4][4]; } ovrMatrix4f;
@@ -89,204 +102,286 @@ typedef enum {
   ovrHmd_DK2              = 6,
   ovrHmd_BlackStar        = 7,
   ovrHmd_CB               = 8,
   ovrHmd_Other            = 9,
   ovrHmd_EnumSize         = 0x7fffffff
 } ovrHmdType;
 
 typedef enum {
-  ovrHmdCap_Present           = 0x0001,
-  ovrHmdCap_Available         = 0x0002,
-  ovrHmdCap_Captured          = 0x0004,
-  ovrHmdCap_ExtendDesktop     = 0x0008,
   ovrHmdCap_DebugDevice       = 0x0010,
-  ovrHmdCap_DisplayOff        = 0x0040,
   ovrHmdCap_LowPersistence    = 0x0080,
   ovrHmdCap_DynamicPrediction = 0x0200,
   ovrHmdCap_NoVSync           = 0x1000,
-  ovrHmdCap_NoMirrorToWindow  = 0x2000
+  ovrHmdCap_EnumSize          = 0x7fffffff
 } ovrHmdCapBits;
 
 typedef enum
 {
   ovrTrackingCap_Orientation      = 0x0010,
   ovrTrackingCap_MagYawCorrection = 0x0020,
   ovrTrackingCap_Position         = 0x0040,
   ovrTrackingCap_Idle             = 0x0100,
   ovrTrackingCap_EnumSize         = 0x7fffffff
 } ovrTrackingCaps;
 
 typedef enum {
-  ovrDistortionCap_Chromatic = 0x01,
-  ovrDistortionCap_TimeWarp  = 0x02,
-  ovrDistortionCap_Vignette  = 0x08,
-  ovrDistortionCap_NoRestore = 0x10,
-  ovrDistortionCap_FlipInput = 0x20,
-  ovrDistortionCap_SRGB      = 0x40,
-  ovrDistortionCap_Overdrive = 0x80,
-  ovrDistortionCap_HqDistortion = 0x100,
-  ovrDistortionCap_LinuxDevFullscreen = 0x200,
-  ovrDistortionCap_ComputeShader = 0x400,
-  ovrDistortionCap_TimewarpJitDelay = 0x1000,
-  ovrDistortionCap_ProfileNoSpinWaits = 0x10000,
-  ovrDistortionCap_EnumSize = 0x7fffffff
-} ovrDistortionCaps;
-
-typedef enum {
   ovrEye_Left  = 0,
   ovrEye_Right = 1,
   ovrEye_Count = 2,
   ovrEye_EnumSize = 0x7fffffff
 } ovrEyeType;
 
-typedef struct ovrHmdDesc_ {
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
   void* Handle;
   ovrHmdType  Type;
-  const char* ProductName;    
+  OVR_ON64(uint32_t pad0;)
+  const char* ProductName;
   const char* Manufacturer;
   short VendorId;
   short ProductId;
   char SerialNumber[24];
   short FirmwareMajor;
   short FirmwareMinor;
   float CameraFrustumHFovInRadians;
   float CameraFrustumVFovInRadians;
   float CameraFrustumNearZInMeters;
   float CameraFrustumFarZInMeters;
 
   unsigned int HmdCaps;
   unsigned int TrackingCaps;
-  unsigned int DistortionCaps;
 
   ovrFovPort  DefaultEyeFov[ovrEye_Count];
   ovrFovPort  MaxEyeFov[ovrEye_Count];
   ovrEyeType  EyeRenderOrder[ovrEye_Count];
 
   ovrSizei    Resolution;
-  ovrVector2i WindowsPos;
-
-  const char* DisplayDeviceName;
-  int         DisplayId;
 } ovrHmdDesc;
 
 typedef const ovrHmdDesc* ovrHmd;
 
 typedef enum {
   ovrStatus_OrientationTracked    = 0x0001,
   ovrStatus_PositionTracked       = 0x0002,
   ovrStatus_CameraPoseTracked     = 0x0004,
   ovrStatus_PositionConnected     = 0x0020,
   ovrStatus_HmdConnected          = 0x0080,
   ovrStatus_EnumSize              = 0x7fffffff
 } ovrStatusBits;
 
-typedef struct ovrSensorData_ {
+typedef struct OVR_ALIGNAS(4) {
   ovrVector3f    Accelerometer;
   ovrVector3f    Gyro;
   ovrVector3f    Magnetometer;
   float          Temperature;
   float          TimeInSeconds;
 } ovrSensorData;
 
 
-typedef struct ovrTrackingState_ {
+typedef struct OVR_ALIGNAS(8) {
   ovrPoseStatef HeadPose;
   ovrPosef CameraPose;
   ovrPosef LeveledCameraPose;
   ovrSensorData RawSensorData;
   unsigned int StatusFlags;
-  double LastVisionProcessingTime;
   uint32_t LastCameraFrameCounter;
-  uint32_t Pad;
+  uint32_t pad0;
 } ovrTrackingState;
 
-typedef struct OVR_ALIGNAS(8) ovrFrameTiming_ {
-  float DeltaSeconds;
-  float Pad; 
-  double ThisFrameSeconds;
-  double TimewarpPointSeconds;
-  double NextFrameSeconds;
-  double ScanoutMidpointSeconds;
-  double EyeScanoutSeconds[2];    
+typedef struct OVR_ALIGNAS(8) {
+  double DisplayMidpointSeconds;
+  double FrameIntervalSeconds;
+  unsigned AppFrameIndex;
+  unsigned DisplayFrameIndex;
 } ovrFrameTiming;
 
-typedef struct ovrEyeRenderDesc_ {
+typedef struct OVR_ALIGNAS(4) {
   ovrEyeType  Eye;
   ovrFovPort  Fov;
   ovrRecti DistortedViewport;
   ovrVector2f PixelsPerTanAngleAtCenter;
   ovrVector3f HmdToEyeViewOffset;
 } ovrEyeRenderDesc;
 
-typedef struct ovrDistortionVertex_ {
-  ovrVector2f ScreenPosNDC;
-  float       TimeWarpFactor;
-  float       VignetteFactor;
-  ovrVector2f TanEyeAnglesR;
-  ovrVector2f TanEyeAnglesG;
-  ovrVector2f TanEyeAnglesB;    
-} ovrDistortionVertex;
+typedef struct OVR_ALIGNAS(4) {
+  float Projection22;
+  float Projection23;
+  float Projection32;
+} ovrTimewarpProjectionDesc;
+
+typedef struct OVR_ALIGNAS(4) {
+  ovrVector3f HmdToEyeViewOffset[ovrEye_Count];
+  float HmdSpaceToWorldScaleInMeters;
+} ovrViewScaleDesc;
 
-typedef struct ovrDistortionMesh_ {
-  ovrDistortionVertex* pVertexData;
-  unsigned short*      pIndexData;
-  unsigned int         VertexCount;
-  unsigned int         IndexCount;
-} ovrDistortionMesh;
+typedef enum {
+    ovrRenderAPI_None,
+    ovrRenderAPI_OpenGL,
+    ovrRenderAPI_Android_GLES,
+    ovrRenderAPI_D3D9_Obsolete,
+    ovrRenderAPI_D3D10_Obsolete,
+    ovrRenderAPI_D3D11,
+    ovrRenderAPI_Count,
+    ovrRenderAPI_EnumSize = 0x7fffffff
+} ovrRenderAPIType;
+
+typedef struct OVR_ALIGNAS(4) {
+  ovrRenderAPIType API;
+  ovrSizei TextureSize;
+} ovrTextureHeader;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+  ovrTextureHeader Header;
+  OVR_ON64(uint32_t pad0;)
+  uintptr_t PlatformData[8];
+} ovrTexture;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+  ovrTexture* Textures;
+  int TextureCount;
+  int CurrentIndex;
+} ovrSwapTextureSet;
 
 typedef enum {
   ovrInit_Debug          = 0x00000001,
   ovrInit_ServerOptional = 0x00000002,
   ovrInit_RequestVersion = 0x00000004,
-  ovrInit_ForceNoDebug   = 0x00000008
+  ovrInit_ForceNoDebug   = 0x00000008,
+  ovrInit_EnumSize       = 0x7fffffff
 } ovrInitFlags;
 
 typedef enum {
   ovrLogLevel_Debug = 0,
   ovrLogLevel_Info  = 1,
-  ovrLogLevel_Error = 2
+  ovrLogLevel_Error = 2,
+  ovrLogLevel_EnumSize = 0x7fffffff
 } ovrLogLevel;
 
+typedef enum {
+  ovrLayerType_Disabled       = 0,
+  ovrLayerType_EyeFov         = 1,
+  ovrLayerType_EyeFovDepth    = 2,
+  ovrLayerType_QuadInWorld    = 3,
+  ovrLayerType_QuadHeadLocked = 4,
+  ovrLayerType_Direct         = 6,
+  ovrLayerType_EnumSize       = 0x7fffffff
+} ovrLayerType;
+
+typedef enum {
+  ovrLayerFlag_HighQuality               = 0x01,
+  ovrLayerFlag_TextureOriginAtBottomLeft = 0x02
+} ovrLayerFlags;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+    ovrLayerType    Type;
+    unsigned        Flags;
+} ovrLayerHeader;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+    ovrLayerHeader      Header;
+    ovrSwapTextureSet*  ColorTexture[ovrEye_Count];
+    ovrRecti            Viewport[ovrEye_Count];
+    ovrFovPort          Fov[ovrEye_Count];
+    ovrPosef            RenderPose[ovrEye_Count];
+} ovrLayerEyeFov;
+
 typedef void (OVR_PFN *ovrLogCallback)(int level, const char* message);
 
-typedef struct {
+typedef struct OVR_ALIGNAS(8) {
   uint32_t Flags;
   uint32_t RequestedMinorVersion;
   ovrLogCallback LogCallback;
   uint32_t ConnectionTimeoutMS;
+  OVR_ON64(uint32_t pad0;)
 } ovrInitParams;
 
-typedef ovrBool (OVR_PFN *pfn_ovr_Initialize)(ovrInitParams const* params);
+enum {
+  ovrSuccess = 0,
+
+  ovrError_MemoryAllocationFailure = -1000,
+  ovrError_SocketCreationFailure   = -1001,
+  ovrError_InvalidHmd              = -1002,
+  ovrError_Timeout                 = -1003,
+  ovrError_NotInitialized          = -1004,
+  ovrError_InvalidParameter        = -1005,
+  ovrError_ServiceError            = -1006,
+  ovrError_NoHmd                   = -1007,
+
+  ovrError_AudioReservedBegin      = -2000,
+  ovrError_AudioReservedEnd        = -2999,
+
+  ovrError_Initialize              = -3000,
+  ovrError_LibLoad                 = -3001,
+  ovrError_LibVersion              = -3002,
+  ovrError_ServiceConnection       = -3003,
+  ovrError_ServiceVersion          = -3004,
+  ovrError_IncompatibleOS          = -3005,
+  ovrError_DisplayInit             = -3006,
+  ovrError_ServerStart             = -3007,
+  ovrError_Reinitialization        = -3008,
+
+  ovrError_InvalidBundleAdjustment = -4000,
+  ovrError_USBBandwidth            = -4001
+};
+
+typedef ovrResult (OVR_PFN *pfn_ovr_Initialize)(ovrInitParams const* params);
 typedef void (OVR_PFN *pfn_ovr_Shutdown)();
-typedef int (OVR_PFN *pfn_ovrHmd_Detect)();
-typedef ovrHmd (OVR_PFN *pfn_ovrHmd_Create)(int index);
+typedef double (OVR_PFN *pfn_ovr_GetTimeInSeconds)();
+
+typedef ovrResult (OVR_PFN *pfn_ovrHmd_Detect)();
+typedef ovrResult (OVR_PFN *pfn_ovrHmd_Create)(int index, ovrHmd*);
+typedef ovrResult (OVR_PFN *pfn_ovrHmd_CreateDebug)(ovrHmdType type, ovrHmd*);
 typedef void (OVR_PFN *pfn_ovrHmd_Destroy)(ovrHmd hmd);
-typedef ovrHmd (OVR_PFN *pfn_ovrHmd_CreateDebug)(ovrHmdType type);
-typedef const char* (OVR_PFN *pfn_ovrHmd_GetLastError)(ovrHmd hmd);
-typedef ovrBool (OVR_PFN *pfn_ovrHmd_AttachToWindow)(ovrHmd hmd, void* window, const ovrRecti* destMirrorRect, const ovrRecti* sourceRenderTargetRect);
-typedef unsigned int (OVR_PFN *pfn_ovrHmd_GetEnabledCaps)(ovrHmd hmd);
-typedef void (OVR_PFN *pfn_ovrHmd_SetEnabledCaps)(ovrHmd hmd, unsigned int hmdCaps);
-typedef ovrBool (OVR_PFN *pfn_ovrHmd_ConfigureTracking)(ovrHmd hmd, unsigned int supportedTrackingCaps, unsigned int requiredTrackingCaps); 
+
+typedef ovrResult (OVR_PFN *pfn_ovrHmd_ConfigureTracking)(ovrHmd hmd, unsigned int supportedTrackingCaps, unsigned int requiredTrackingCaps); 
 typedef void (OVR_PFN *pfn_ovrHmd_RecenterPose)(ovrHmd hmd);
 typedef ovrTrackingState (OVR_PFN *pfn_ovrHmd_GetTrackingState)(ovrHmd hmd, double absTime);
 typedef ovrSizei (OVR_PFN *pfn_ovrHmd_GetFovTextureSize)(ovrHmd hmd, ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel);
 typedef ovrEyeRenderDesc (OVR_PFN *pfn_ovrHmd_GetRenderDesc)(ovrHmd hmd, ovrEyeType eyeType, ovrFovPort fov);
-typedef ovrBool (OVR_PFN *pfn_ovrHmd_CreateDistortionMesh)(ovrHmd hmd, ovrEyeType eyeType, ovrFovPort fov, unsigned int distortionCaps, ovrDistortionMesh *meshData);
-typedef void (OVR_PFN *pfn_ovrHmd_DestroyDistortionMesh)(ovrDistortionMesh* meshData);
-typedef void (OVR_PFN *pfn_ovrHmd_GetRenderScaleAndOffset)(ovrFovPort fov, ovrSizei textureSize, ovrRecti renderViewport, ovrVector2f uvScaleOffsetOut[2]);
-typedef ovrFrameTiming (OVR_PFN *pfn_ovrHmd_GetFrameTiming)(ovrHmd hmd, unsigned int frameIndex);
-typedef ovrFrameTiming (OVR_PFN *pfn_ovrHmd_BeginFrameTiming)(ovrHmd hmd, unsigned int frameIndex);
-typedef void (OVR_PFN *pfn_ovrHmd_EndFrameTiming)(ovrHmd hmd);
-typedef void (OVR_PFN *pfn_ovrHmd_ResetFrameTiming)(ovrHmd hmd, unsigned int frameIndex, bool vsync);
-typedef void (OVR_PFN *pfn_ovrHmd_GetEyePoses)(ovrHmd hmd, unsigned int frameIndex, ovrVector3f hmdToEyeViewOffset[2], ovrPosef outEyePoses[2], ovrTrackingState* outHmdTrackingState);
-typedef ovrPosef (OVR_PFN *pfn_ovrHmd_GetHmdPosePerEye)(ovrHmd hmd, ovrEyeType eye);
-typedef void (OVR_PFN *pfn_ovrHmd_GetEyeTimewarpMatrices)(ovrHmd hmd, ovrEyeType eye, ovrPosef renderPose, ovrMatrix4f twmOut[2]);
-typedef ovrMatrix4f (OVR_PFN *pfn_ovrMatrix4f_Projection) (ovrFovPort fov, float znear, float zfar, ovrBool rightHanded );
-typedef ovrMatrix4f (OVR_PFN *pfn_ovrMatrix4f_OrthoSubProjection) (ovrFovPort fov, ovrVector2f orthoScale, float orthoDistance, float eyeViewAdjustX);
-typedef double (OVR_PFN *pfn_ovr_GetTimeInSeconds)();
+
+typedef void (OVR_PFN *pfn_ovrHmd_DestroySwapTextureSet)(ovrHmd hmd, ovrSwapTextureSet* textureSet);
+typedef ovrResult (OVR_PFN *pfn_ovrHmd_SubmitFrame)(ovrHmd hmd, unsigned int frameIndex,
+                                                    const ovrViewScaleDesc* viewScaleDesc,
+                                                    ovrLayerHeader const * const * layerPtrList, unsigned int layerCount);
+
+#ifdef XP_WIN
+struct D3D11_TEXTURE2D_DESC;
+struct ID3D11Device;
+struct ID3D11Texture2D;
+struct ID3D11ShaderResourceView;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+    ovrTextureHeader          Header;
+    OVR_ON64(uint32_t pad0;)
+    ID3D11Texture2D*          pTexture;
+    ID3D11ShaderResourceView* pSRView;
+} ovrD3D11TextureData;
+
+typedef union {
+    ovrTexture          Texture;
+    ovrD3D11TextureData D3D11;
+} ovrD3D11Texture;
+
+typedef ovrResult (OVR_PFN *pfn_ovrHmd_CreateSwapTextureSetD3D11)(ovrHmd hmd, ID3D11Device* device,
+                                                                  const D3D11_TEXTURE2D_DESC* desc,
+                                                                  ovrSwapTextureSet** outTextureSet);
+#endif
+
+typedef struct {
+    ovrTextureHeader Header;
+    uint32_t TexId;
+} ovrGLTextureData;
+
+typedef union {
+    ovrTexture       Texture;
+    ovrGLTextureData OGL;
+} ovrGLTexture;
+
+typedef ovrResult (OVR_PFN *pfn_ovrHmd_CreateSwapTextureSetGL)(ovrHmd hmd, uint32_t format,
+                                                               int width, int height,
+                                                               ovrSwapTextureSet** outTextureSet);
 
 #ifdef __cplusplus 
 }
 #endif
 
 #endif /* mozilla_ovr_capi_dynamic_h_ */
 #endif /* OVR_CAPI_h */
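
For readers unfamiliar with the scheme above: the pfn_* typedefs exist so that the Oculus 0.6.0 runtime's entry points can be resolved at load time instead of link time. The following is only a sketch of that usage, assuming a Windows-only path; BindOvrSymbols and InitOvr060 are illustrative names, not the actual binding code in gfxVROculus.cpp (which this excerpt does not show).

#include <windows.h>            // assumed Windows-only path for brevity
#include "ovr_capi_dynamic.h"   // the header patched above

static pfn_ovr_Initialize do_ovr_Initialize = nullptr;
static pfn_ovrHmd_Detect  do_ovrHmd_Detect  = nullptr;

// Resolve the needed entry points from an already-loaded LibOVR runtime module.
static bool BindOvrSymbols(HMODULE aLib)
{
  do_ovr_Initialize = reinterpret_cast<pfn_ovr_Initialize>(
      GetProcAddress(aLib, "ovr_Initialize"));
  do_ovrHmd_Detect = reinterpret_cast<pfn_ovrHmd_Detect>(
      GetProcAddress(aLib, "ovrHmd_Detect"));
  return do_ovr_Initialize && do_ovrHmd_Detect;
}

static bool InitOvr060(HMODULE aLib)
{
  if (!BindOvrSymbols(aLib)) {
    return false;
  }
  ovrInitParams params = {};
  params.Flags = ovrInit_RequestVersion;
  params.RequestedMinorVersion = 6;           // target the 0.6.x runtime
  ovrResult rv = do_ovr_Initialize(&params);  // negative ovrError_* values on failure
  return rv == ovrSuccess;
}

Note how the 0.5-era ovrBool returns have become ovrResult codes, which is why the enum of ovrError_* values now accompanies these typedefs; callers are expected to compare against ovrSuccess rather than test a boolean.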
--- a/modules/libpref/init/all.js
+++ b/modules/libpref/init/all.js
@@ -4577,16 +4577,19 @@ pref("jsloader.reuseGlobal", false);
 pref("dom.browserElement.maxScreenshotDelayMS", 2000);
 
 // Whether we should show the placeholder when the element is focused but empty.
 pref("dom.placeholder.show_on_focus", true);
 
 pref("dom.vr.enabled", false);
 // 0 = never; 1 = only if real devices aren't there; 2 = always
 pref("dom.vr.add-test-devices", 1);
+// true = show the VR textures in our compositing output; false = don't.
+// Enabling it may have a performance impact.
+pref("gfx.vr.mirror-textures", false);
 
 // MMS UA Profile settings
 pref("wap.UAProf.url", "");
 pref("wap.UAProf.tagname", "x-wap-profile");
 
 // MMS version 1.1 = 0x11 (or decimal 17)
 // MMS version 1.3 = 0x13 (or decimal 19)
 // @see OMA-TS-MMS_ENC-V1_3-20110913-A clause 7.3.34
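
The gfx.vr.mirror-textures pref added above presumably gets a matching accessor in the gfxPrefs.h hunk of this patch (listed in the diffstat but not shown here). A sketch of what that declaration and a consumer could look like; the accessor name VRMirrorTextures is a guess, not the patch's actual identifier.

// gfxPrefs.h (sketch; real accessor name and placement may differ)
DECL_GFX_PREF(Live, "gfx.vr.mirror-textures", VRMirrorTextures, bool, false);

// Compositor side (sketch): only draw the VR eye textures into the normal
// on-screen compositing output when mirroring is enabled.
if (gfxPrefs::VRMirrorTextures()) {
  // composite the VR layer to the window as well as to the HMD swap textures
}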