Diffstat (limited to 'src/oculusvr')
-rw-r--r--  src/oculusvr/classes/com/jogamp/oculusvr/OVRDynamicLibraryBundleInfo.java    |   6
-rw-r--r--  src/oculusvr/classes/com/jogamp/oculusvr/OVRException.java                   |  43
-rw-r--r--  src/oculusvr/classes/com/jogamp/oculusvr/OVRVersion.java                     |   7
-rw-r--r--  src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererDualFBO.java   | 232
-rw-r--r--  src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererSingleFBO.java | 206
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/OVRDistortion.java               | 638
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java                     | 111
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.fp          |  26
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.vp          |  33
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.fp           |  22
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.vp           |  27
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp.vp        |  44
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp_chroma.vp |  65
13 files changed, 1438 insertions, 22 deletions
diff --git a/src/oculusvr/classes/com/jogamp/oculusvr/OVRDynamicLibraryBundleInfo.java b/src/oculusvr/classes/com/jogamp/oculusvr/OVRDynamicLibraryBundleInfo.java
index 6bd9d1a64..8eafb9927 100644
--- a/src/oculusvr/classes/com/jogamp/oculusvr/OVRDynamicLibraryBundleInfo.java
+++ b/src/oculusvr/classes/com/jogamp/oculusvr/OVRDynamicLibraryBundleInfo.java
@@ -1,5 +1,5 @@
/**
- * Copyright 2010 JogAmp Community. All rights reserved.
+ * Copyright 2014 JogAmp Community. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@@ -95,12 +95,12 @@ import java.util.*;
}
@Override
- public final long toolGetProcAddress(long toolGetProcAddressHandle, String funcName) {
+ public final long toolGetProcAddress(final long toolGetProcAddressHandle, final String funcName) {
return 0;
}
@Override
- public final boolean useToolGetProcAdressFirst(String funcName) {
+ public final boolean useToolGetProcAdressFirst(final String funcName) {
return false;
}
diff --git a/src/oculusvr/classes/com/jogamp/oculusvr/OVRException.java b/src/oculusvr/classes/com/jogamp/oculusvr/OVRException.java
index e65338025..f2a254e14 100644
--- a/src/oculusvr/classes/com/jogamp/oculusvr/OVRException.java
+++ b/src/oculusvr/classes/com/jogamp/oculusvr/OVRException.java
@@ -1,22 +1,29 @@
/**
- * Copyright (C) 2014 JogAmp Community. All rights reserved.
+ * Copyright 2014 JogAmp Community. All rights reserved.
*
- * Permission is hereby granted, free of charge, to any person obtaining a
- * copy of this software and associated documentation files (the "Software"),
- * to deal in the Software without restriction, including without limitation
- * the rights to use, copy, modify, merge, publish, distribute, sublicense,
- * and/or sell copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following conditions:
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
*
- * The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
*
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
- * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
- * BRIAN PAUL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
- * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
- * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
*/
package com.jogamp.oculusvr;
@@ -32,19 +39,19 @@ public class OVRException extends RuntimeException {
/** Constructs an OVRException object with the specified detail
message. */
- public OVRException(String message) {
+ public OVRException(final String message) {
super(message);
}
/** Constructs an OVRException object with the specified detail
message and root cause. */
- public OVRException(String message, Throwable cause) {
+ public OVRException(final String message, final Throwable cause) {
super(message, cause);
}
/** Constructs an OVRException object with the specified root
cause. */
- public OVRException(Throwable cause) {
+ public OVRException(final Throwable cause) {
super(cause);
}
}
diff --git a/src/oculusvr/classes/com/jogamp/oculusvr/OVRVersion.java b/src/oculusvr/classes/com/jogamp/oculusvr/OVRVersion.java
index 25db640e4..eebd771d8 100644
--- a/src/oculusvr/classes/com/jogamp/oculusvr/OVRVersion.java
+++ b/src/oculusvr/classes/com/jogamp/oculusvr/OVRVersion.java
@@ -29,10 +29,15 @@
package com.jogamp.oculusvr;
import com.jogamp.common.GlueGenVersion;
-
import com.jogamp.common.os.Platform;
import com.jogamp.common.util.VersionUtil;
import com.jogamp.common.util.JogampVersion;
+import com.jogamp.oculusvr.OVR;
+import com.jogamp.oculusvr.OvrHmdContext;
+import com.jogamp.oculusvr.ovrHmdDesc;
+import com.jogamp.oculusvr.ovrSensorDesc;
+import com.jogamp.oculusvr.ovrSizei;
+import com.jogamp.oculusvr.ovrVector2i;
import java.util.jar.Manifest;
diff --git a/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererDualFBO.java b/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererDualFBO.java
new file mode 100644
index 000000000..b10a58842
--- /dev/null
+++ b/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererDualFBO.java
@@ -0,0 +1,232 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.oculusvr;
+
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES2;
+import javax.media.opengl.GLAutoDrawable;
+import javax.media.opengl.GLEventListener;
+import javax.media.opengl.GLEventListener2;
+
+import jogamp.opengl.oculusvr.OVRDistortion;
+
+import com.jogamp.oculusvr.OVR;
+import com.jogamp.oculusvr.ovrFrameTiming;
+import com.jogamp.opengl.FBObject;
+import com.jogamp.opengl.FBObject.TextureAttachment;
+import com.jogamp.opengl.FBObject.Attachment.Type;
+import com.jogamp.opengl.math.FloatUtil;
+
+/**
+ * OculusVR (OVR) <i>Side By Side</i> Distortion Renderer utilizing {@link OVRDistortion},
+ * implementing {@link GLEventListener} for convenience.
+ * <p>
+ * The implementation renders a {@link GLEventListener2} instance
+ * side-by-side using two {@link FBObject}s according to {@link OVRDistortion}.
+ * </p>
+ */
+public class OVRSBSRendererDualFBO implements GLEventListener {
+
+ private final float[] mat4Projection = new float[16];
+ private final float[] mat4Modelview = new float[16];
+ private final OVRDistortion dist;
+ private final boolean ownsDist;
+ private final GLEventListener2 upstream;
+ private final FBObject[] fbos;
+
+ // final float[] eyePos = { 0.0f, 1.6f, -5.0f };
+ private final float[] eyePos = { 0.0f, 0.0f, -20.0f };
+ // EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);
+ private float eyeYaw = FloatUtil.PI; // 180 degrees in radians
+ private float frustumNear = 0.1f;
+ private float frustumFar = 7000f;
+ private int numSamples;
+ private final TextureAttachment[] fboTexs;
+
+ public OVRSBSRendererDualFBO(final OVRDistortion dist, final boolean ownsDist, final GLEventListener2 upstream, final int numSamples) {
+ this.dist = dist;
+ this.ownsDist = ownsDist;
+ this.upstream = upstream;
+ this.numSamples = numSamples;
+ fbos = new FBObject[2];
+ fbos[0] = new FBObject();
+ fbos[1] = new FBObject();
+ fboTexs = new TextureAttachment[2];
+ }
+
+ /**
+ * Sets the parameters used to compute the upstream (scene) projection- and modelview matrices.
+ * @param eyePos float[3] eye position vector
+ * @param eyeYaw eye direction in radians, i.e. {@link FloatUtil#PI} for 180 degrees
+ * @param frustumNear frustum near value
+ * @param frustumFar frustum far value
+ */
+ public void setUpstreamPMVParams(final float[] eyePos, final float eyeYaw, final float frustumNear, final float frustumFar) {
+ System.arraycopy(eyePos, 0, this.eyePos, 0, 3);
+ this.eyeYaw = eyeYaw;
+ this.frustumNear = frustumNear;
+ this.frustumFar = frustumFar;
+ }
+
+ private void initFBOs(final GL gl, final int width, final int height) {
+ // remove all texture attachments, since MSAA uses just color-render-buffer
+ // and non-MSAA uses texture2d-buffer
+ fbos[0].detachAllColorbuffer(gl);
+ fbos[1].detachAllColorbuffer(gl);
+
+ fbos[0].reset(gl, width, height, numSamples, false);
+ fbos[1].reset(gl, width, height, numSamples, false);
+ if(fbos[0].getNumSamples() != fbos[1].getNumSamples()) {
+ throw new InternalError("sample size mismatch: \n\t0: "+fbos[0]+"\n\t1: "+fbos[1]);
+ }
+ numSamples = fbos[0].getNumSamples();
+
+ if(numSamples>0) {
+ fbos[0].attachColorbuffer(gl, 0, true);
+ fbos[0].resetSamplingSink(gl);
+ fbos[1].attachColorbuffer(gl, 0, true);
+ fbos[1].resetSamplingSink(gl);
+ fboTexs[0] = fbos[0].getSamplingSink();
+ fboTexs[1] = fbos[1].getSamplingSink();
+ } else {
+ fboTexs[0] = fbos[0].attachTexture2D(gl, 0, true);
+ fboTexs[1] = fbos[1].attachTexture2D(gl, 0, true);
+ }
+ numSamples=fbos[0].getNumSamples();
+ fbos[0].attachRenderbuffer(gl, Type.DEPTH, 24);
+ fbos[0].unbind(gl);
+ fbos[1].attachRenderbuffer(gl, Type.DEPTH, 24);
+ fbos[1].unbind(gl);
+ }
+
+ @SuppressWarnings("unused")
+ private void resetFBOs(final GL gl, final int width, final int height) {
+ fbos[0].reset(gl, width, height, numSamples, true);
+ fbos[1].reset(gl, width, height, numSamples, true);
+ if(fbos[0].getNumSamples() != fbos[1].getNumSamples()) {
+ throw new InternalError("sample size mismatch: \n\t0: "+fbos[0]+"\n\t1: "+fbos[1]);
+ }
+ numSamples = fbos[0].getNumSamples();
+ if(numSamples>0) {
+ fboTexs[0] = fbos[0].getSamplingSink();
+ fboTexs[1] = fbos[1].getSamplingSink();
+ } else {
+ fboTexs[0] = (TextureAttachment) fbos[0].getColorbuffer(0);
+ fboTexs[1] = (TextureAttachment) fbos[1].getColorbuffer(0);
+ }
+ }
+
+ @Override
+ public void init(final GLAutoDrawable drawable) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ dist.init(gl);
+
+ // We will do some offscreen rendering, setup FBO...
+ if( null != upstream ) {
+ final int[] textureSize = dist.textureSize;
+ initFBOs(gl, textureSize[0], textureSize[1]);
+ upstream.init(drawable);
+ }
+
+ gl.setSwapInterval(1);
+ }
+
+ @Override
+ public void dispose(final GLAutoDrawable drawable) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ // FIXME complete release
+ if( null != upstream ) {
+ upstream.dispose(drawable);
+ fbos[0].destroy(gl);
+ fbos[1].destroy(gl);
+ }
+ if( ownsDist ) {
+ dist.dispose(gl);
+ }
+ }
+
+ @Override
+ public void display(final GLAutoDrawable drawable) {
+ final ovrFrameTiming frameTiming = OVR.ovrHmd_BeginFrameTiming(dist.hmdCtx, 0);
+
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ if(0 < numSamples) {
+ gl.glEnable(GL.GL_MULTISAMPLE);
+ }
+
+ // FIXME: Instead of setting the viewport,
+ // it's better to change the projection matrix!
+ if( null != upstream ) {
+ for(int eyeNum=0; eyeNum<2; eyeNum++) {
+ // final ovrPosef eyeRenderPose = OVR.ovrHmd_GetEyePose(hmdCtx, eyeNum);
+ // final float[] eyePos = OVRUtil.getVec3f(eyeRenderPose.getPosition());
+ fbos[eyeNum].bind(gl);
+
+ final OVRDistortion.EyeData eyeDist = dist.eyes[eyeNum];
+ final int[] viewport = eyeDist.viewport;
+ gl.glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
+
+ dist.getSBSUpstreamPMV(eyeNum, eyePos, eyeYaw, frustumNear, frustumFar, mat4Projection, mat4Modelview);
+ upstream.setProjectionModelview(drawable, mat4Projection, mat4Modelview);
+ upstream.display(drawable, eyeNum > 0 ? GLEventListener2.DISPLAY_REPEAT : 0);
+ fbos[eyeNum].unbind(gl);
+ }
+ gl.glViewport(0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight());
+ }
+
+ gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+ gl.glClear(GL.GL_COLOR_BUFFER_BIT);
+ gl.glActiveTexture(GL.GL_TEXTURE0 + dist.texUnit0.intValue());
+
+ if( null != upstream ) {
+ dist.displayOneEyePre(gl, frameTiming.getTimewarpPointSeconds());
+ fbos[0].use(gl, fboTexs[0]);
+ dist.displayOneEye(gl, 0);
+ fbos[0].unuse(gl);
+ fbos[1].use(gl, fboTexs[1]);
+ dist.displayOneEye(gl, 1);
+ fbos[1].unuse(gl);
+ dist.displayOneEyePost(gl);
+ } else {
+ dist.display(gl, frameTiming.getTimewarpPointSeconds());
+ }
+
+ if( !drawable.getAutoSwapBufferMode() ) {
+ drawable.swapBuffers();
+ }
+ OVR.ovrHmd_EndFrameTiming(dist.hmdCtx);
+ }
+
+ @Override
+ public void reshape(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height) {
+ if( !drawable.getAutoSwapBufferMode() ) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ gl.glViewport(0, 0, width, height);
+ }
+ }
+}
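The renderers in this change drive an upstream GLEventListener2 scene: per eye they hand over the distortion-derived projection- and modelview matrices via setProjectionModelview(..) and then call display(drawable, flags), passing DISPLAY_REPEAT for the second eye (plus DISPLAY_DONTCLEAR in the single-FBO variant below). A minimal upstream sketch follows; it assumes GLEventListener2 extends GLEventListener (the renderers only invoke init, dispose, setProjectionModelview and display(drawable, int)), and the actual scene drawing is a placeholder.

// Hypothetical upstream scene for the SBS renderers; assumes GLEventListener2
// extends GLEventListener. The scene drawing itself is a placeholder.
import javax.media.opengl.GL;
import javax.media.opengl.GL2ES2;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLEventListener2;

public class SimpleScene implements GLEventListener2 {
    private final float[] mat4Projection = new float[16];
    private final float[] mat4Modelview = new float[16];

    @Override
    public void setProjectionModelview(final GLAutoDrawable drawable, final float[] mat4P, final float[] mat4Mv) {
        // Per-eye matrices as computed by OVRDistortion.getSBSUpstreamPMV(..)
        System.arraycopy(mat4P, 0, mat4Projection, 0, 16);
        System.arraycopy(mat4Mv, 0, mat4Modelview, 0, 16);
    }

    @Override
    public void display(final GLAutoDrawable drawable, final int flags) {
        final GL2ES2 gl = drawable.getGL().getGL2ES2();
        if( 0 == ( flags & GLEventListener2.DISPLAY_DONTCLEAR ) ) {
            gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT);
        }
        // ... render the scene using mat4Projection / mat4Modelview ...
    }

    @Override
    public void display(final GLAutoDrawable drawable) { display(drawable, 0); }

    @Override
    public void init(final GLAutoDrawable drawable) { /* create shaders, VBOs, ... */ }

    @Override
    public void dispose(final GLAutoDrawable drawable) { /* release GL resources */ }

    @Override
    public void reshape(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height) { }
}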
diff --git a/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererSingleFBO.java b/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererSingleFBO.java
new file mode 100644
index 000000000..22b8b37ce
--- /dev/null
+++ b/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererSingleFBO.java
@@ -0,0 +1,206 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.oculusvr;
+
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES2;
+import javax.media.opengl.GLAutoDrawable;
+import javax.media.opengl.GLEventListener;
+import javax.media.opengl.GLEventListener2;
+
+import jogamp.opengl.oculusvr.OVRDistortion;
+
+import com.jogamp.oculusvr.OVR;
+import com.jogamp.oculusvr.ovrFrameTiming;
+import com.jogamp.opengl.FBObject;
+import com.jogamp.opengl.FBObject.TextureAttachment;
+import com.jogamp.opengl.FBObject.Attachment.Type;
+import com.jogamp.opengl.math.FloatUtil;
+
+/**
+ * OculusVR (OVR) <i>Side By Side</i> Distortion Renderer utilizing {@link OVRDistortion},
+ * implementing {@link GLEventListener} for convenience.
+ * <p>
+ * The implementation renders a {@link GLEventListener2} instance
+ * side-by-side within one {@link FBObject} according to {@link OVRDistortion}.
+ * </p>
+ */
+public class OVRSBSRendererSingleFBO implements GLEventListener {
+
+ private final float[] mat4Projection = new float[16];
+ private final float[] mat4Modelview = new float[16];
+ private final OVRDistortion dist;
+ private final boolean ownsDist;
+ private final GLEventListener2 upstream;
+ private final FBObject fbo0;
+
+ // final float[] eyePos = { 0.0f, 1.6f, -5.0f };
+ private final float[] eyePos = { 0.0f, 0.0f, -20.0f };
+ // EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);
+ private float eyeYaw = FloatUtil.PI; // 180 degrees in radians
+ private float frustumNear = 0.1f;
+ private float frustumFar = 7000f;
+ private int numSamples;
+ private TextureAttachment fbo0Tex;
+
+ public OVRSBSRendererSingleFBO(final OVRDistortion dist, final boolean ownsDist, final GLEventListener2 upstream, final int numSamples) {
+ this.dist = dist;
+ this.ownsDist = ownsDist;
+ this.upstream = upstream;
+ this.numSamples = numSamples;
+ fbo0 = new FBObject();
+ }
+
+ /**
+ * Sets the parameters used to compute the upstream (scene) projection- and modelview matrices.
+ * @param eyePos float[3] eye position vector
+ * @param eyeYaw eye direction in radians, i.e. {@link FloatUtil#PI} for 180 degrees
+ * @param frustumNear frustum near value
+ * @param frustumFar frustum far value
+ */
+ public void setUpstreamPMVParams(final float[] eyePos, final float eyeYaw, final float frustumNear, final float frustumFar) {
+ System.arraycopy(eyePos, 0, this.eyePos, 0, 3);
+ this.eyeYaw = eyeYaw;
+ this.frustumNear = frustumNear;
+ this.frustumFar = frustumFar;
+ }
+
+ private void initFBOs(final GL gl, final int width, final int height) {
+ // remove all texture attachments, since MSAA uses just color-render-buffer
+ // and non-MSAA uses texture2d-buffer
+ fbo0.detachAllColorbuffer(gl);
+
+ fbo0.reset(gl, width, height, numSamples, false);
+ numSamples = fbo0.getNumSamples();
+
+ if(numSamples>0) {
+ fbo0.attachColorbuffer(gl, 0, true);
+ fbo0.resetSamplingSink(gl);
+ fbo0Tex = fbo0.getSamplingSink();
+ } else {
+ fbo0Tex = fbo0.attachTexture2D(gl, 0, true);
+ }
+ numSamples=fbo0.getNumSamples();
+ fbo0.attachRenderbuffer(gl, Type.DEPTH, 24);
+ fbo0.unbind(gl);
+ }
+
+ @SuppressWarnings("unused")
+ private void resetFBOs(final GL gl, final int width, final int height) {
+ fbo0.reset(gl, width, height, numSamples, true);
+ numSamples = fbo0.getNumSamples();
+ if(numSamples>0) {
+ fbo0Tex = fbo0.getSamplingSink();
+ } else {
+ fbo0Tex = (TextureAttachment) fbo0.getColorbuffer(0);
+ }
+ }
+
+ @Override
+ public void init(final GLAutoDrawable drawable) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ dist.init(gl);
+
+ // We will do some offscreen rendering, setup FBO...
+ if( null != upstream ) {
+ final int[] textureSize = dist.textureSize;
+ initFBOs(gl, textureSize[0], textureSize[1]);
+ upstream.init(drawable);
+ }
+
+ gl.setSwapInterval(1);
+ }
+
+ @Override
+ public void dispose(final GLAutoDrawable drawable) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ // FIXME complete release
+ if( null != upstream ) {
+ upstream.dispose(drawable);
+ fbo0.destroy(gl);
+ }
+ if( ownsDist ) {
+ dist.dispose(gl);
+ }
+ }
+
+ @Override
+ public void display(final GLAutoDrawable drawable) {
+ final ovrFrameTiming frameTiming = OVR.ovrHmd_BeginFrameTiming(dist.hmdCtx, 0);
+
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ if(0 < numSamples) {
+ gl.glEnable(GL.GL_MULTISAMPLE);
+ }
+
+ // FIXME: Instead of setting the viewport,
+ // it's better to change the projection matrix!
+ if( null != upstream ) {
+ fbo0.bind(gl);
+
+ for(int eyeNum=0; eyeNum<2; eyeNum++) {
+ // final ovrPosef eyeRenderPose = OVR.ovrHmd_GetEyePose(hmdCtx, eyeNum);
+ // final float[] eyePos = OVRUtil.getVec3f(eyeRenderPose.getPosition());
+ final OVRDistortion.EyeData eyeDist = dist.eyes[eyeNum];
+ final int[] viewport = eyeDist.viewport;
+ gl.glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
+
+ dist.getSBSUpstreamPMV(eyeNum, eyePos, eyeYaw, frustumNear, frustumFar, mat4Projection, mat4Modelview);
+ upstream.setProjectionModelview(drawable, mat4Projection, mat4Modelview);
+ upstream.display(drawable, eyeNum > 0 ? GLEventListener2.DISPLAY_REPEAT | GLEventListener2.DISPLAY_DONTCLEAR : 0);
+ }
+ fbo0.unbind(gl);
+ gl.glViewport(0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight());
+ }
+
+ gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+ gl.glClear(GL.GL_COLOR_BUFFER_BIT);
+ gl.glActiveTexture(GL.GL_TEXTURE0 + dist.texUnit0.intValue());
+
+ if( null != upstream ) {
+ fbo0.use(gl, fbo0Tex);
+ dist.display(gl, frameTiming.getTimewarpPointSeconds());
+ fbo0.unuse(gl);
+ } else {
+ dist.display(gl, frameTiming.getTimewarpPointSeconds());
+ }
+
+ if( !drawable.getAutoSwapBufferMode() ) {
+ drawable.swapBuffers();
+ }
+ OVR.ovrHmd_EndFrameTiming(dist.hmdCtx);
+ }
+
+ @Override
+ public void reshape(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height) {
+ if( !drawable.getAutoSwapBufferMode() ) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ gl.glViewport(0, 0, width, height);
+ }
+ }
+}
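Putting the pieces together: an OVRDistortion is created from an HMD context and two per-eye FOV ports, wrapped by one of the renderers above, and attached to a GLAutoDrawable. A minimal wiring sketch, assuming the OvrHmdContext and the ovrFovPort pair are acquired elsewhere via the OVR binding, and that SimpleScene is the hypothetical upstream listener sketched earlier:

// Wiring sketch: OVRDistortion driving OVRSBSRendererSingleFBO on a NEWT GLWindow.
// hmdCtx and the two per-eye fov ports are assumed to be acquired elsewhere.
import javax.media.opengl.GLCapabilities;
import javax.media.opengl.GLEventListener2;
import javax.media.opengl.GLProfile;

import com.jogamp.newt.opengl.GLWindow;
import com.jogamp.oculusvr.OVR;
import com.jogamp.oculusvr.OvrHmdContext;
import com.jogamp.oculusvr.ovrFovPort;
import com.jogamp.opengl.oculusvr.OVRSBSRendererSingleFBO;

import jogamp.opengl.oculusvr.OVRDistortion;

public class OVRWiringSketch {
    public static GLWindow setup(final OvrHmdContext hmdCtx, final ovrFovPort[] eyeFov,
                                 final GLEventListener2 scene) {
        final int distortionCaps = OVR.ovrDistortionCap_Chromatic
                                 | OVR.ovrDistortionCap_Vignette
                                 | OVR.ovrDistortionCap_TimeWarp;
        // Single side-by-side eye texture at 1:1 pixel density.
        final OVRDistortion dist = OVRDistortion.create(hmdCtx, true /* sbsSingleTexture */,
                                                        eyeFov, 1f /* pixelsPerDisplayPixel */,
                                                        distortionCaps);
        // The renderer takes ownership of dist (disposes it) and uses a 4x MSAA eye FBO.
        final OVRSBSRendererSingleFBO renderer = new OVRSBSRendererSingleFBO(dist, true, scene, 4);

        final GLWindow window = GLWindow.create(new GLCapabilities(GLProfile.getGL2ES2()));
        window.setSize(dist.textureSize[0], dist.textureSize[1]);
        window.addGLEventListener(renderer);
        window.setVisible(true);
        return window;
    }
}

For the dual-FBO variant, substitute OVRSBSRendererDualFBO with the same constructor arguments and pass sbsSingleTexture=false to OVRDistortion.create(..).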
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRDistortion.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRDistortion.java
new file mode 100644
index 000000000..590b0e834
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRDistortion.java
@@ -0,0 +1,638 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.oculusvr;
+
+import java.nio.FloatBuffer;
+import java.nio.ShortBuffer;
+
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES2;
+import javax.media.opengl.GLArrayData;
+import javax.media.opengl.GLException;
+import javax.media.opengl.GLUniformData;
+
+import com.jogamp.common.nio.Buffers;
+import com.jogamp.common.os.Platform;
+import com.jogamp.oculusvr.OVR;
+import com.jogamp.oculusvr.OVRException;
+import com.jogamp.oculusvr.OvrHmdContext;
+import com.jogamp.oculusvr.ovrDistortionMesh;
+import com.jogamp.oculusvr.ovrDistortionVertex;
+import com.jogamp.oculusvr.ovrEyeRenderDesc;
+import com.jogamp.oculusvr.ovrFovPort;
+import com.jogamp.oculusvr.ovrMatrix4f;
+import com.jogamp.oculusvr.ovrPosef;
+import com.jogamp.oculusvr.ovrRecti;
+import com.jogamp.oculusvr.ovrSizei;
+import com.jogamp.oculusvr.ovrVector2f;
+import com.jogamp.opengl.JoglVersion;
+import com.jogamp.opengl.math.FloatUtil;
+import com.jogamp.opengl.math.Quaternion;
+import com.jogamp.opengl.math.VectorUtil;
+import com.jogamp.opengl.util.GLArrayDataServer;
+import com.jogamp.opengl.util.glsl.ShaderCode;
+import com.jogamp.opengl.util.glsl.ShaderProgram;
+
+/**
+ * OculusVR Distortion Data and OpenGL Renderer Utility
+ */
+public class OVRDistortion {
+ public static final float[] VEC3_UP = { 0f, 1f, 0f };
+ public static final float[] VEC3_FORWARD = { 0f, 0f, -1f };
+
+ private static final String shaderPrefix01 = "dist01";
+ private static final String shaderTimewarpSuffix = "_timewarp";
+ private static final String shaderChromaSuffix = "_chroma";
+ private static final String shaderPlainSuffix = "_plain";
+
+ public static boolean useTimewarp(final int distortionCaps) { return 0 != ( distortionCaps & OVR.ovrDistortionCap_TimeWarp ) ; }
+ public static boolean useChromatic(final int distortionCaps) { return 0 != ( distortionCaps & OVR.ovrDistortionCap_Chromatic ) ; }
+ public static boolean useVignette(final int distortionCaps) { return 0 != ( distortionCaps & OVR.ovrDistortionCap_Vignette ) ; }
+
+ public static class EyeData {
+ public final int eyeName;
+ public final int distortionCaps;
+ public final int vertexCount;
+ public final int indexCount;
+ public final int[/*4*/] viewport;
+ public final float[/*3*/] viewAdjust;
+
+ public final GLUniformData eyeToSourceUVScale;
+ public final GLUniformData eyeToSourceUVOffset;
+ public final GLUniformData eyeRotationStart;
+ public final GLUniformData eyeRotationEnd;
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ public final GLArrayDataServer iVBO;
+ public final GLArrayData vboPos, vboParams, vboTexCoordsR, vboTexCoordsG, vboTexCoordsB;
+ public final GLArrayDataServer indices;
+
+ public final ovrEyeRenderDesc eyeRenderDesc;
+ public final ovrFovPort eyeRenderFov;
+
+ public ovrPosef eyeRenderPose;
+ public final Quaternion eyeRenderPoseOrientation;
+ public final float[] eyeRenderPosePosition;
+
+ public final boolean useTimewarp() { return OVRDistortion.useTimewarp(distortionCaps); }
+ public final boolean useChromatic() { return OVRDistortion.useChromatic(distortionCaps); }
+ public final boolean useVignette() { return OVRDistortion.useVignette(distortionCaps); }
+
+ private EyeData(final OvrHmdContext hmdCtx, final int distortionCaps,
+ final ovrEyeRenderDesc eyeRenderDesc, final ovrSizei ovrTextureSize, final int[] eyeRenderViewport) {
+ this.eyeName = eyeRenderDesc.getEye();
+ this.distortionCaps = distortionCaps;
+ viewport = new int[4];
+ viewAdjust = new float[3];
+ System.arraycopy(eyeRenderViewport, 0, viewport, 0, 4);
+
+ final FloatBuffer fstash = Buffers.newDirectFloatBuffer(2+2+16+16);
+
+ eyeToSourceUVScale = new GLUniformData("ovr_EyeToSourceUVScale", 2, Buffers.slice2Float(fstash, 0, 2));
+ eyeToSourceUVOffset = new GLUniformData("ovr_EyeToSourceUVOffset", 2, Buffers.slice2Float(fstash, 2, 2));
+
+ if( useTimewarp() ) {
+ eyeRotationStart = new GLUniformData("ovr_EyeRotationStart", 4, 4, Buffers.slice2Float(fstash, 4, 16));
+ eyeRotationEnd = new GLUniformData("ovr_EyeRotationEnd", 4, 4, Buffers.slice2Float(fstash, 20, 16));
+ } else {
+ eyeRotationStart = null;
+ eyeRotationEnd = null;
+ }
+
+ this.eyeRenderDesc = eyeRenderDesc;
+ this.eyeRenderFov = eyeRenderDesc.getFov();
+
+ this.eyeRenderPoseOrientation = new Quaternion();
+ this.eyeRenderPosePosition = new float[3];
+
+
+ final ovrDistortionMesh meshData = ovrDistortionMesh.create();
+ final ovrFovPort fov = eyeRenderDesc.getFov();
+
+ if( !OVR.ovrHmd_CreateDistortionMesh(hmdCtx, eyeName, fov, distortionCaps, meshData) ) {
+ throw new OVRException("Failed to create meshData for eye "+eyeName+" and "+OVRUtil.toString(fov));
+ }
+ vertexCount = meshData.getVertexCount();
+ indexCount = meshData.getIndexCount();
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ final boolean useChromatic = useChromatic();
+ final boolean useVignette = useVignette();
+ final int compsPerElement = 2+2+2+( useChromatic ? 2+2 /* texCoordG + texCoordB */: 0 );
+ iVBO = GLArrayDataServer.createGLSLInterleaved(compsPerElement, GL.GL_FLOAT, false, vertexCount, GL.GL_STATIC_DRAW);
+ vboPos = iVBO.addGLSLSubArray("ovr_Position", 2, GL.GL_ARRAY_BUFFER);
+ vboParams = iVBO.addGLSLSubArray("ovr_Params", 2, GL.GL_ARRAY_BUFFER);
+ vboTexCoordsR = iVBO.addGLSLSubArray("ovr_TexCoordR", 2, GL.GL_ARRAY_BUFFER);
+ if( useChromatic ) {
+ vboTexCoordsG = iVBO.addGLSLSubArray("ovr_TexCoordG", 2, GL.GL_ARRAY_BUFFER);
+ vboTexCoordsB = iVBO.addGLSLSubArray("ovr_TexCoordB", 2, GL.GL_ARRAY_BUFFER);
+ } else {
+ vboTexCoordsG = null;
+ vboTexCoordsB = null;
+ }
+ indices = GLArrayDataServer.createData(1, GL2ES2.GL_SHORT, indexCount, GL.GL_STATIC_DRAW, GL.GL_ELEMENT_ARRAY_BUFFER);
+ OVRUtil.copyVec3fToFloat(eyeRenderDesc.getViewAdjust(), viewAdjust);
+
+ // Setup: eyeToSourceUVScale, eyeToSourceUVOffset
+ {
+ final ovrVector2f[] uvScaleOffsetOut = new ovrVector2f[2];
+ uvScaleOffsetOut[0] = ovrVector2f.create(); // FIXME: remove ctor / double check
+ uvScaleOffsetOut[1] = ovrVector2f.create();
+
+ final ovrRecti ovrEyeRenderViewport = OVRUtil.createOVRRecti(eyeRenderViewport);
+ OVR.ovrHmd_GetRenderScaleAndOffset(fov, ovrTextureSize, ovrEyeRenderViewport, uvScaleOffsetOut);
+ if( OVRUtil.DEBUG ) {
+ System.err.println("XXX."+eyeName+": fov "+OVRUtil.toString(fov));
+ System.err.println("XXX."+eyeName+": uvScale "+OVRUtil.toString(uvScaleOffsetOut[0]));
+ System.err.println("XXX."+eyeName+": uvOffset "+OVRUtil.toString(uvScaleOffsetOut[0]));
+ System.err.println("XXX."+eyeName+": textureSize "+OVRUtil.toString(ovrTextureSize));
+ System.err.println("XXX."+eyeName+": viewport "+OVRUtil.toString(ovrEyeRenderViewport));
+ }
+ final FloatBuffer eyeToSourceUVScaleFB = eyeToSourceUVScale.floatBufferValue();
+ eyeToSourceUVScaleFB.put(0, uvScaleOffsetOut[0].getX());
+ eyeToSourceUVScaleFB.put(1, uvScaleOffsetOut[0].getY());
+ final FloatBuffer eyeToSourceUVOffsetFB = eyeToSourceUVOffset.floatBufferValue();
+ eyeToSourceUVOffsetFB.put(0, uvScaleOffsetOut[1].getX());
+ eyeToSourceUVOffsetFB.put(1, uvScaleOffsetOut[1].getY());
+ }
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ final FloatBuffer iVBOFB = (FloatBuffer)iVBO.getBuffer();
+ final ovrDistortionVertex[] ovRes = new ovrDistortionVertex[1];
+ ovRes[0] = ovrDistortionVertex.create(); // FIXME: remove ctor / double check
+
+ for ( int vertNum = 0; vertNum < vertexCount; vertNum++ ) {
+ final ovrDistortionVertex ov = meshData.getPVertexData(vertNum, ovRes)[0];
+ ovrVector2f v;
+
+ // pos
+ v = ov.getPos();
+ iVBOFB.put(v.getX());
+ iVBOFB.put(v.getY());
+
+ // params
+ if( useVignette ) {
+ iVBOFB.put(ov.getVignetteFactor());
+ } else {
+ iVBOFB.put(1.0f);
+ }
+ iVBOFB.put(ov.getTimeWarpFactor());
+
+ // texCoordR
+ v = ov.getTexR();
+ iVBOFB.put(v.getX());
+ iVBOFB.put(v.getY());
+
+ if( useChromatic ) {
+ // texCoordG
+ v = ov.getTexG();
+ iVBOFB.put(v.getX());
+ iVBOFB.put(v.getY());
+
+ // texCoordB
+ v = ov.getTexB();
+ iVBOFB.put(v.getX());
+ iVBOFB.put(v.getY());
+ }
+ }
+ if( OVRUtil.DEBUG ) {
+ System.err.println("XXX."+eyeName+": iVBO "+iVBO);
+ }
+ {
+ final ShortBuffer in = meshData.getPIndexData();
+ final ShortBuffer out = (ShortBuffer) indices.getBuffer();
+ out.put(in);
+ }
+ if( OVRUtil.DEBUG ) {
+ System.err.println("XXX."+eyeName+": idx "+indices);
+ System.err.println("XXX."+eyeName+": distEye "+this);
+ }
+ OVR.ovrHmd_DestroyDistortionMesh(meshData);
+ }
+
+ private void linkData(final GL2ES2 gl, final ShaderProgram sp) {
+ if( 0 > vboPos.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboPos);
+ }
+ if( 0 > vboParams.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboParams);
+ }
+ if( 0 > vboTexCoordsR.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboTexCoordsR);
+ }
+ if( useChromatic() ) {
+ if( 0 > vboTexCoordsG.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboTexCoordsG);
+ }
+ if( 0 > vboTexCoordsB.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboTexCoordsB);
+ }
+ }
+ if( 0 > eyeToSourceUVScale.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+eyeToSourceUVScale);
+ }
+ if( 0 > eyeToSourceUVOffset.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+eyeToSourceUVOffset);
+ }
+ if( useTimewarp() ) {
+ if( 0 > eyeRotationStart.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+eyeRotationStart);
+ }
+ if( 0 > eyeRotationEnd.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+eyeRotationEnd);
+ }
+ }
+ iVBO.seal(gl, true);
+ iVBO.enableBuffer(gl, false);
+ indices.seal(gl, true);
+ indices.enableBuffer(gl, false);
+ }
+
+ public void dispose(final GL2ES2 gl) {
+ iVBO.destroy(gl);
+ indices.destroy(gl);
+ }
+ public void enableVBO(final GL2ES2 gl, final boolean enable) {
+ iVBO.enableBuffer(gl, enable);
+ indices.bindBuffer(gl, enable); // keeps VBO binding if enable:=true
+ }
+
+ public void updateUniform(final GL2ES2 gl, final ShaderProgram sp) {
+ gl.glUniform(eyeToSourceUVScale);
+ gl.glUniform(eyeToSourceUVOffset);
+ if( useTimewarp() ) {
+ gl.glUniform(eyeRotationStart);
+ gl.glUniform(eyeRotationEnd);
+ }
+ }
+
+ public void updateTimewarp(final OvrHmdContext hmdCtx, final ovrPosef eyeRenderPose, final float[] mat4Tmp1, final float[] mat4Tmp2) {
+ final ovrMatrix4f[] timeWarpMatrices = new ovrMatrix4f[2];
+ timeWarpMatrices[0] = ovrMatrix4f.create(); // FIXME: remove ctor / double check
+ timeWarpMatrices[1] = ovrMatrix4f.create();
+ OVR.ovrHmd_GetEyeTimewarpMatrices(hmdCtx, eyeName, eyeRenderPose, timeWarpMatrices);
+
+ final float[] eyeRotationStartM = FloatUtil.transposeMatrix(timeWarpMatrices[0].getM(0, mat4Tmp1), mat4Tmp2);
+ final FloatBuffer eyeRotationStartU = eyeRotationStart.floatBufferValue();
+ eyeRotationStartU.put(eyeRotationStartM);
+ eyeRotationStartU.rewind();
+
+ final float[] eyeRotationEndM = FloatUtil.transposeMatrix(timeWarpMatrices[1].getM(0, mat4Tmp1), mat4Tmp2);
+ final FloatBuffer eyeRotationEndU = eyeRotationEnd.floatBufferValue();
+ eyeRotationEndU.put(eyeRotationEndM);
+ eyeRotationEndU.rewind();
+ }
+
+ /**
+ * Updates {@link #eyeRenderPose} and its extracted
+ * {@link #eyeRenderPoseOrientation} and {@link #eyeRenderPosePosition}.
+ * @param hmdCtx used to get the {@link #eyeRenderPose} via {@link OVR#ovrHmd_GetEyePose(OvrHmdContext, int)}
+ */
+ public void updateEyePose(final OvrHmdContext hmdCtx) {
+ eyeRenderPose = OVR.ovrHmd_GetEyePose(hmdCtx, eyeName);
+ OVRUtil.copyToQuaternion(eyeRenderPose.getOrientation(), eyeRenderPoseOrientation);
+ OVRUtil.copyVec3fToFloat(eyeRenderPose.getPosition(), eyeRenderPosePosition);
+ }
+
+ @Override
+ public String toString() {
+ return "Eye["+eyeName+", viewport "+viewport[0]+"/"+viewport[1]+" "+viewport[2]+"x"+viewport[3]+
+ " viewAdjust["+viewAdjust[0]+", "+viewAdjust[1]+", "+viewAdjust[2]+
+ "], vertices "+vertexCount+", indices "+indexCount+
+ ", uvScale["+eyeToSourceUVScale.floatBufferValue().get(0)+", "+eyeToSourceUVScale.floatBufferValue().get(1)+
+ "], uvOffset["+eyeToSourceUVOffset.floatBufferValue().get(0)+", "+eyeToSourceUVOffset.floatBufferValue().get(1)+
+ "], desc"+OVRUtil.toString(eyeRenderDesc)+"]";
+ }
+ }
+
+ public final OvrHmdContext hmdCtx;
+ public final EyeData[] eyes;
+ public final int distortionCaps;
+ public final int[/*2*/] textureSize;
+ public final GLUniformData texUnit0;
+ public final boolean usesDistMesh;
+
+ private final float[] mat4Tmp1 = new float[16];
+ private final float[] mat4Tmp2 = new float[16];
+ private final float[] vec3Tmp1 = new float[3];
+ private final float[] vec3Tmp2 = new float[3];
+ private final float[] vec3Tmp3 = new float[3];
+
+ private ShaderProgram sp;
+
+ @Override
+ public String toString() {
+ return "OVRDist[caps 0x"+Integer.toHexString(distortionCaps)+", "+
+ ", tex "+textureSize[0]+"x"+textureSize[1]+
+ ", vignette "+useVignette()+", chromatic "+useChromatic()+", timewarp "+useTimewarp()+
+ ", "+Platform.NEWLINE+" "+eyes[0]+", "+Platform.NEWLINE+" "+eyes[1]+"]";
+ }
+
+ public static OVRDistortion create(final OvrHmdContext hmdCtx, final boolean sbsSingleTexture,
+ final ovrFovPort[] eyeFov, final float pixelsPerDisplayPixel, final int distortionCaps) {
+ final ovrEyeRenderDesc[] eyeRenderDesc = new ovrEyeRenderDesc[2];
+ eyeRenderDesc[0] = OVR.ovrHmd_GetRenderDesc(hmdCtx, OVR.ovrEye_Left, eyeFov[0]);
+ eyeRenderDesc[1] = OVR.ovrHmd_GetRenderDesc(hmdCtx, OVR.ovrEye_Right, eyeFov[1]);
+ if( OVRUtil.DEBUG ) {
+ System.err.println("XXX: eyeRenderDesc[0] "+OVRUtil.toString(eyeRenderDesc[0]));
+ System.err.println("XXX: eyeRenderDesc[1] "+OVRUtil.toString(eyeRenderDesc[1]));
+ }
+
+ final ovrSizei recommenedTex0Size = OVR.ovrHmd_GetFovTextureSize(hmdCtx, OVR.ovrEye_Left, eyeRenderDesc[0].getFov(), pixelsPerDisplayPixel);
+ final ovrSizei recommenedTex1Size = OVR.ovrHmd_GetFovTextureSize(hmdCtx, OVR.ovrEye_Right, eyeRenderDesc[1].getFov(), pixelsPerDisplayPixel);
+ if( OVRUtil.DEBUG ) {
+ System.err.println("XXX: recommenedTex0Size "+OVRUtil.toString(recommenedTex0Size));
+ System.err.println("XXX: recommenedTex1Size "+OVRUtil.toString(recommenedTex1Size));
+ }
+ final int[] textureSize = new int[2];
+ if( sbsSingleTexture ) {
+ textureSize[0] = recommenedTex0Size.getW() + recommenedTex1Size.getW();
+ } else {
+ textureSize[0] = Math.max(recommenedTex0Size.getW(), recommenedTex1Size.getW());
+ }
+ textureSize[1] = Math.max(recommenedTex0Size.getH(), recommenedTex1Size.getH());
+ if( OVRUtil.DEBUG ) {
+ System.err.println("XXX: textureSize "+textureSize[0]+"x"+textureSize[1]);
+ }
+
+ final int[][] eyeRenderViewports = new int[2][4];
+ if( sbsSingleTexture ) {
+ eyeRenderViewports[0][0] = 0;
+ eyeRenderViewports[0][1] = 0;
+ eyeRenderViewports[0][2] = textureSize[0] / 2;
+ eyeRenderViewports[0][3] = textureSize[1];
+ eyeRenderViewports[1][0] = (textureSize[0] + 1) / 2;
+ eyeRenderViewports[1][1] = 0;
+ eyeRenderViewports[1][2] = textureSize[0] / 2;
+ eyeRenderViewports[1][3] = textureSize[1];
+ } else {
+ eyeRenderViewports[0][0] = 0;
+ eyeRenderViewports[0][1] = 0;
+ eyeRenderViewports[0][2] = textureSize[0];
+ eyeRenderViewports[0][3] = textureSize[1];
+ eyeRenderViewports[1][0] = 0;
+ eyeRenderViewports[1][1] = 0;
+ eyeRenderViewports[1][2] = textureSize[0];
+ eyeRenderViewports[1][3] = textureSize[1];
+ }
+ return new OVRDistortion(hmdCtx, sbsSingleTexture, eyeRenderDesc, textureSize, eyeRenderViewports, distortionCaps, 0);
+ }
+
+ public OVRDistortion(final OvrHmdContext hmdCtx, final boolean sbsSingleTexture, final ovrEyeRenderDesc[] eyeRenderDescs,
+ final int[] textureSize, final int[][] eyeRenderViewports,
+ final int distortionCaps, final int textureUnit) {
+ this.hmdCtx = hmdCtx;
+ this.eyes = new EyeData[2];
+ this.distortionCaps = distortionCaps;
+ this.textureSize = new int[2];
+ System.arraycopy(textureSize, 0, this.textureSize, 0, 2);
+
+ texUnit0 = new GLUniformData("ovr_Texture0", textureUnit);
+ usesDistMesh = true;
+
+ final ovrSizei ovrTextureSize = OVRUtil.createOVRSizei(textureSize);
+ eyes[0] = new EyeData(hmdCtx, distortionCaps, eyeRenderDescs[0], ovrTextureSize, eyeRenderViewports[0]);
+ eyes[1] = new EyeData(hmdCtx, distortionCaps, eyeRenderDescs[1], ovrTextureSize, eyeRenderViewports[1]);
+ sp = null;
+ }
+
+ public final boolean useTimewarp() { return useTimewarp(distortionCaps); }
+ public final boolean useChromatic() { return useChromatic(distortionCaps); }
+ public final boolean useVignette() { return useVignette(distortionCaps); }
+
+ public void updateTimewarp(final ovrPosef eyeRenderPose, final int eyeNum) {
+ eyes[eyeNum].updateTimewarp(hmdCtx, eyeRenderPose, mat4Tmp1, mat4Tmp2);
+ }
+ public void updateTimewarp(final ovrPosef[] eyeRenderPoses) {
+ eyes[0].updateTimewarp(hmdCtx, eyeRenderPoses[0], mat4Tmp1, mat4Tmp2);
+ eyes[1].updateTimewarp(hmdCtx, eyeRenderPoses[1], mat4Tmp1, mat4Tmp2);
+ }
+
+ public void enableVBO(final GL2ES2 gl, final boolean enable, final int eyeNum) {
+ if( null == sp ) {
+ throw new IllegalStateException("Not initialized");
+ }
+ eyes[eyeNum].enableVBO(gl, enable);
+ }
+
+ public void updateUniforms(final GL2ES2 gl, final int eyeNum) {
+ if( null == sp ) {
+ throw new IllegalStateException("Not initialized");
+ }
+ gl.glUniform(texUnit0);
+ eyes[eyeNum].updateUniform(gl, sp);
+ }
+
+ public final ShaderProgram getShaderProgram() { return sp; }
+
+ public void init(final GL2ES2 gl) {
+ if( OVRUtil.DEBUG ) {
+ System.err.println(JoglVersion.getGLInfo(gl, null).toString());
+ }
+ if( null != sp ) {
+ throw new IllegalStateException("Already initialized");
+ }
+ final String vertexShaderBasename;
+ final String fragmentShaderBasename;
+ {
+ final StringBuilder sb = new StringBuilder();
+ sb.append(shaderPrefix01);
+ if( !useChromatic() && !useTimewarp() ) {
+ sb.append(shaderPlainSuffix);
+ } else if( useChromatic() && !useTimewarp() ) {
+ sb.append(shaderChromaSuffix);
+ } else if( useTimewarp() ) {
+ sb.append(shaderTimewarpSuffix);
+ if( useChromatic() ) {
+ sb.append(shaderChromaSuffix);
+ }
+ }
+ vertexShaderBasename = sb.toString();
+ sb.setLength(0);
+ sb.append(shaderPrefix01);
+ if( useChromatic() ) {
+ sb.append(shaderChromaSuffix);
+ } else {
+ sb.append(shaderPlainSuffix);
+ }
+ fragmentShaderBasename = sb.toString();
+ }
+ final ShaderCode vp0 = ShaderCode.create(gl, GL2ES2.GL_VERTEX_SHADER, OVRDistortion.class, "shader",
+ "shader/bin", vertexShaderBasename, true);
+ final ShaderCode fp0 = ShaderCode.create(gl, GL2ES2.GL_FRAGMENT_SHADER, OVRDistortion.class, "shader",
+ "shader/bin", fragmentShaderBasename, true);
+ vp0.defaultShaderCustomization(gl, true, true);
+ fp0.defaultShaderCustomization(gl, true, true);
+
+ sp = new ShaderProgram();
+ sp.add(gl, vp0, System.err);
+ sp.add(gl, fp0, System.err);
+ if(!sp.link(gl, System.err)) {
+ throw new GLException("could not link program: "+sp);
+ }
+ sp.useProgram(gl, true);
+ if( 0 > texUnit0.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+texUnit0);
+ }
+ eyes[0].linkData(gl, sp);
+ eyes[1].linkData(gl, sp);
+ sp.useProgram(gl, false);
+ }
+
+ public void dispose(final GL2ES2 gl) {
+ sp.useProgram(gl, false);
+ eyes[0].dispose(gl);
+ eyes[1].dispose(gl);
+ sp.destroy(gl);
+ }
+
+ public void display(final GL2ES2 gl, final double timewarpPointSeconds) {
+ if( null == sp ) {
+ throw new IllegalStateException("Not initialized");
+ }
+ if( useTimewarp() ) {
+ OVR.ovr_WaitTillTime(timewarpPointSeconds);
+ }
+ gl.glDisable(GL.GL_CULL_FACE);
+ gl.glDisable(GL.GL_DEPTH_TEST);
+ gl.glDisable(GL.GL_BLEND);
+
+ if( !gl.isGLcore() ) {
+ gl.glEnable(GL.GL_TEXTURE_2D);
+ }
+
+ sp.useProgram(gl, true);
+
+ gl.glUniform(texUnit0);
+
+ for(int eyeNum=0; eyeNum<2; eyeNum++) {
+ final EyeData eye = eyes[eyeNum];
+ if( useTimewarp() ) {
+ eye.updateTimewarp(hmdCtx, eye.eyeRenderPose, mat4Tmp1, mat4Tmp2);
+ }
+ eye.updateUniform(gl, sp);
+ eye.enableVBO(gl, true);
+ if( usesDistMesh ) {
+ gl.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL2ES2.GL_UNSIGNED_SHORT, 0);
+ } else {
+ gl.glDrawArrays(GL.GL_TRIANGLE_STRIP, 0, eye.vertexCount);
+ }
+ eyes[eyeNum].enableVBO(gl, false);
+ }
+
+ sp.useProgram(gl, false);
+ }
+
+ public void displayOneEyePre(final GL2ES2 gl, final double timewarpPointSeconds) {
+ if( null == sp ) {
+ throw new IllegalStateException("Not initialized");
+ }
+ if( useTimewarp() ) {
+ OVR.ovr_WaitTillTime(timewarpPointSeconds);
+ }
+ gl.glDisable(GL.GL_CULL_FACE);
+ gl.glDisable(GL.GL_DEPTH_TEST);
+ gl.glDisable(GL.GL_BLEND);
+
+ if( !gl.isGLcore() ) {
+ gl.glEnable(GL.GL_TEXTURE_2D);
+ }
+
+ sp.useProgram(gl, true);
+
+ gl.glUniform(texUnit0);
+ }
+
+ public void displayOneEye(final GL2ES2 gl, final int eyeNum) {
+ if( null == sp ) {
+ throw new IllegalStateException("Not initialized");
+ }
+ final EyeData eye = eyes[eyeNum];
+ if( useTimewarp() ) {
+ eye.updateTimewarp(hmdCtx, eye.eyeRenderPose, mat4Tmp1, mat4Tmp2);
+ }
+ eye.updateUniform(gl, sp);
+ eye.enableVBO(gl, true);
+ if( usesDistMesh ) {
+ gl.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL2ES2.GL_UNSIGNED_SHORT, 0);
+ } else {
+ gl.glDrawArrays(GL.GL_TRIANGLE_STRIP, 0, eye.vertexCount);
+ }
+ eyes[eyeNum].enableVBO(gl, false);
+ }
+
+ public void displayOneEyePost(final GL2ES2 gl) {
+ sp.useProgram(gl, false);
+ }
+
+ /**
+ * Calculates the <i>Side By Side</i>, SBS, projection- and modelview matrix for one eye.
+ * <p>
+ * Method also issues {@link EyeData#updateEyePose(OvrHmdContext)}.
+ * </p>
+ * @param eyeNum eye index, 0 for the left and 1 for the right eye
+ * @param eyePos float[3] eye position vector
+ * @param eyeYaw eye direction, i.e. {@link FloatUtil#PI} for 180 degrees
+ * @param near frustum near value
+ * @param far frustum far value
+ * @param mat4Projection float[16] projection matrix result
+ * @param mat4Modelview float[16] modelview matrix result
+ */
+ public void getSBSUpstreamPMV(final int eyeNum, final float[] eyePos, final float eyeYaw, final float near, final float far,
+ final float[] mat4Projection, final float[] mat4Modelview) {
+ final EyeData eyeDist = eyes[eyeNum];
+
+ eyeDist.updateEyePose(hmdCtx);
+
+ //
+ // Projection
+ //
+ final ovrMatrix4f pm = OVR.ovrMatrix4f_Projection(eyeDist.eyeRenderFov, near, far, true /* rightHanded*/);
+ /* final float[] mat4Projection = */ FloatUtil.transposeMatrix(pm.getM(0, mat4Tmp1), mat4Projection);
+
+ //
+ // Modelview
+ //
+ final Quaternion rollPitchYaw = new Quaternion();
+ rollPitchYaw.rotateByAngleY(eyeYaw);
+ final float[] shiftedEyePos = rollPitchYaw.rotateVector(vec3Tmp1, 0, eyeDist.eyeRenderPosePosition, 0);
+ VectorUtil.addVec3(shiftedEyePos, shiftedEyePos, eyePos);
+
+ rollPitchYaw.mult(eyeDist.eyeRenderPoseOrientation);
+ final float[] up = rollPitchYaw.rotateVector(vec3Tmp2, 0, VEC3_UP, 0);
+ final float[] forward = rollPitchYaw.rotateVector(vec3Tmp3, 0, VEC3_FORWARD, 0);
+ final float[] center = VectorUtil.addVec3(forward, shiftedEyePos, forward);
+
+ final float[] mLookAt = FloatUtil.makeLookAt(mat4Tmp2, 0, shiftedEyePos, 0, center, 0, up, 0, mat4Tmp1);
+ final float[] mViewAdjust = FloatUtil.makeTranslation(mat4Modelview, true, eyeDist.viewAdjust[0], eyeDist.viewAdjust[1], eyeDist.viewAdjust[2]);
+
+ /* mat4Modelview = */ FloatUtil.multMatrix(mViewAdjust, mLookAt);
+ }
+
+
+}
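For reference, the per-frame protocol OVRDistortion expects (and which the two convenience renderers implement) is: begin OVR frame timing, render both eyes into the side-by-side source texture using the per-eye viewport and getSBSUpstreamPMV(..), then run the distortion pass over that texture and end frame timing. A condensed sketch, assuming dist.init(gl) has been called once, that the caller binds/unbinds the source FBO and its color texture around the two steps, and that EyeRenderer is a hypothetical scene callback:

// Per-frame protocol sketch for OVRDistortion, mirroring the convenience renderers above.
import javax.media.opengl.GL;
import javax.media.opengl.GL2ES2;

import com.jogamp.oculusvr.OVR;
import com.jogamp.oculusvr.ovrFrameTiming;
import com.jogamp.opengl.math.FloatUtil;

import jogamp.opengl.oculusvr.OVRDistortion;

public final class OVRFrameLoopSketch {
    /** Hypothetical scene callback, drawing one eye with the given matrices. */
    public interface EyeRenderer {
        void renderEye(GL2ES2 gl, int eyeNum, float[] mat4Projection, float[] mat4Modelview);
    }

    public static void renderFrame(final GL2ES2 gl, final OVRDistortion dist,
                                   final EyeRenderer scene, final float[] eyePos) {
        final float[] mat4P = new float[16];
        final float[] mat4Mv = new float[16];
        final ovrFrameTiming frameTiming = OVR.ovrHmd_BeginFrameTiming(dist.hmdCtx, 0);

        // 1) Render both eyes side-by-side into the source texture (FBO bound by the caller).
        for(int eyeNum = 0; eyeNum < 2; eyeNum++) {
            final int[] vp = dist.eyes[eyeNum].viewport;
            gl.glViewport(vp[0], vp[1], vp[2], vp[3]);
            // Also refreshes the eye pose consumed by the timewarp uniforms.
            dist.getSBSUpstreamPMV(eyeNum, eyePos, FloatUtil.PI, 0.1f, 7000f, mat4P, mat4Mv);
            scene.renderEye(gl, eyeNum, mat4P, mat4Mv);
        }

        // 2) Distortion pass, sampling the source texture the caller binds to dist.texUnit0.
        gl.glActiveTexture(GL.GL_TEXTURE0 + dist.texUnit0.intValue());
        dist.display(gl, frameTiming.getTimewarpPointSeconds());

        OVR.ovrHmd_EndFrameTiming(dist.hmdCtx);
    }
}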
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java
new file mode 100644
index 000000000..e28a50203
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java
@@ -0,0 +1,111 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.oculusvr;
+
+import jogamp.opengl.Debug;
+
+import com.jogamp.oculusvr.ovrEyeRenderDesc;
+import com.jogamp.oculusvr.ovrFovPort;
+import com.jogamp.oculusvr.ovrQuatf;
+import com.jogamp.oculusvr.ovrRecti;
+import com.jogamp.oculusvr.ovrSizei;
+import com.jogamp.oculusvr.ovrVector2f;
+import com.jogamp.oculusvr.ovrVector2i;
+import com.jogamp.oculusvr.ovrVector3f;
+import com.jogamp.opengl.math.Quaternion;
+
+/**
+ * OculusVR Data Conversion Helper Functions
+ */
+public class OVRUtil {
+ public static final boolean DEBUG = Debug.debug("OVR");
+
+ public static ovrRecti createOVRRecti(final int[] rect) {
+ final ovrRecti res = ovrRecti.create();
+ final ovrVector2i pos = res.getPos();
+ final ovrSizei size = res.getSize();
+ pos.setX(rect[0]);
+ pos.setY(rect[1]);
+ size.setW(rect[2]);
+ size.setH(rect[3]);
+ return res;
+ }
+ public static ovrRecti[] createOVRRectis(final int[][] rects) {
+ final ovrRecti[] res = new ovrRecti[rects.length];
+ for(int i=0; i<res.length; i++) {
+ res[i] = createOVRRecti(rects[i]);
+ }
+ return res;
+ }
+ public static ovrSizei createOVRSizei(final int[] size) {
+ final ovrSizei res = ovrSizei.create();
+ res.setW(size[0]);
+ res.setH(size[1]);
+ return res;
+ }
+ public static Quaternion getQuaternion(final ovrQuatf q) {
+ return new Quaternion(q.getX(), q.getY(), q.getZ(), q.getW());
+ }
+ public static void copyToQuaternion(final ovrQuatf in, final Quaternion out) {
+ out.set(in.getX(), in.getY(), in.getZ(), in.getW());
+ }
+ public static float[] getVec3f(final ovrVector3f v) {
+ return new float[] { v.getX(), v.getY(), v.getZ() };
+ }
+ public static void copyVec3fToFloat(final ovrVector3f v, final float[] res) {
+ res[0] = v.getX();
+ res[1] = v.getY();
+ res[2] = v.getZ();
+ }
+
+ public static String toString(final ovrFovPort fov) {
+ return "["+fov.getLeftTan()+" l, "+fov.getRightTan()+" r, "+
+ fov.getUpTan()+" u, "+fov.getDownTan()+" d]";
+ }
+ public static String toString(final ovrSizei rect) {
+ return "["+rect.getW()+" x "+rect.getH()+"]";
+ }
+ public static String toString(final ovrRecti rect) {
+ return "["+rect.getPos().getX()+" / "+rect.getPos().getY()+" "+
+ rect.getSize().getW()+" x "+rect.getSize().getH()+"]";
+ }
+ public static String toString(final ovrVector2f v2) {
+ return "["+v2.getX()+", "+v2.getY()+"]";
+ }
+ public static String toString(final ovrVector3f v3) {
+ return "["+v3.getX()+", "+v3.getY()+", "+v3.getZ()+"]";
+ }
+
+ public static String toString(final ovrEyeRenderDesc desc) {
+ return "["+desc.getEye()+", fov"+toString(desc.getFov())+
+ ", viewport"+toString(desc.getDistortedViewport())+
+ ", pptCtr"+toString(desc.getPixelsPerTanAngleAtCenter())+
+ ", view-adjust"+toString(desc.getViewAdjust())+"]";
+ }
+
+}
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.fp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.fp
new file mode 100644
index 000000000..6d450fe40
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.fp
@@ -0,0 +1,26 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define varying in
+ out vec4 ovr_FragColor;
+ #define texture2D texture
+#else
+ #define ovr_FragColor gl_FragColor
+#endif
+
+uniform sampler2D ovr_Texture0;
+
+varying vec3 ovv_Fade;
+varying vec2 ovv_TexCoordR;
+varying vec2 ovv_TexCoordG;
+varying vec2 ovv_TexCoordB;
+
+void main (void)
+{
+ // 3 samples for fixing chromatic aberrations
+ vec3 color = vec3(texture2D(ovr_Texture0, ovv_TexCoordR).r,
+ texture2D(ovr_Texture0, ovv_TexCoordG).g,
+ texture2D(ovr_Texture0, ovv_TexCoordB).b);
+ ovr_FragColor = vec4(ovv_Fade * color, 1.0); // include vignetteFade
+}
+
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.vp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.vp
new file mode 100644
index 000000000..254d4b081
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.vp
@@ -0,0 +1,33 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 ovr_EyeToSourceUVScale;
+uniform vec2 ovr_EyeToSourceUVOffset;
+
+attribute vec2 ovr_Position;
+attribute vec2 ovr_Params;
+attribute vec2 ovr_TexCoordR;
+attribute vec2 ovr_TexCoordG;
+attribute vec2 ovr_TexCoordB;
+
+varying vec3 ovv_Fade;
+varying vec2 ovv_TexCoordR;
+varying vec2 ovv_TexCoordG;
+varying vec2 ovv_TexCoordB;
+
+void main(void)
+{
+ gl_Position = vec4(ovr_Position.xy, 0.5, 1.0);
+ ovv_Fade = vec3(ovr_Params.r); // vignetteFade
+
+ ovv_TexCoordR = ovr_TexCoordR * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset;
+ ovv_TexCoordR.y = 1.0-ovv_TexCoordR.y;
+ ovv_TexCoordG = ovr_TexCoordG * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset;
+ ovv_TexCoordG.y = 1.0-ovv_TexCoordG.y;
+ ovv_TexCoordB = ovr_TexCoordB * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset;
+ ovv_TexCoordB.y = 1.0-ovv_TexCoordB.y;
+}
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.fp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.fp
new file mode 100644
index 000000000..d3ee5d04d
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.fp
@@ -0,0 +1,22 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define varying in
+ out vec4 ovr_FragColor;
+ #define texture2D texture
+#else
+ #define ovr_FragColor gl_FragColor
+#endif
+
+uniform sampler2D ovr_Texture0;
+
+varying vec3 ovv_Fade;
+varying vec2 ovv_TexCoordR;
+
+void main (void)
+{
+ // single sample, no chromatic aberration correction
+ vec3 color = texture2D(ovr_Texture0, ovv_TexCoordR).rgb;
+ ovr_FragColor = vec4(ovv_Fade * color, 1.0); // include vignetteFade
+}
+
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.vp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.vp
new file mode 100644
index 000000000..6456c7a83
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.vp
@@ -0,0 +1,27 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 ovr_EyeToSourceUVScale;
+uniform vec2 ovr_EyeToSourceUVOffset;
+
+attribute vec2 ovr_Position;
+attribute vec2 ovr_Params;
+attribute vec2 ovr_TexCoordR;
+
+varying vec3 ovv_Fade;
+varying vec2 ovv_TexCoordR;
+
+void main(void)
+{
+ gl_Position = vec4(ovr_Position.xy, 0.5, 1.0);
+ ovv_Fade = vec3(ovr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ ovv_TexCoordR = ovr_TexCoordR * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset;
+ ovv_TexCoordR.y = 1.0-ovv_TexCoordR.y;
+}
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp.vp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp.vp
new file mode 100644
index 000000000..3485625d2
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp.vp
@@ -0,0 +1,44 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 ovr_EyeToSourceUVScale;
+uniform vec2 ovr_EyeToSourceUVOffset;
+uniform mat4 ovr_EyeRotationStart;
+uniform mat4 ovr_EyeRotationEnd;
+
+attribute vec2 ovr_Position;
+attribute vec2 ovr_Params;
+attribute vec2 ovr_TexCoordR;
+
+varying vec3 ovv_Fade;
+varying vec2 ovv_TexCoordR;
+
+void main(void)
+{
+ gl_Position = vec4(ovr_Position.xy, 0.0, 1.0);
+ ovv_Fade = vec3(ovr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD.
+ vec3 TanEyeAngle = vec3 ( ovr_TexCoordR, 1.0 );
+
+ // Accurate time warp lerp vs. faster
+ // Apply the two 3x3 timewarp rotations to these vectors.
+ vec3 TransformedStart = (ovr_EyeRotationStart * vec4(TanEyeAngle, 0)).xyz;
+ vec3 TransformedEnd = (ovr_EyeRotationEnd * vec4(TanEyeAngle, 0)).xyz;
+ // And blend between them.
+ vec3 Transformed = mix ( TransformedStart, TransformedEnd, ovr_Params.g /* timewarpLerpFactor */ );
+
+ // Project them back onto the Z=1 plane of the rendered images.
+ float RecipZ = 1.0 / Transformed.z;
+ vec2 Flattened = vec2 ( Transformed.x * RecipZ, Transformed.y * RecipZ );
+
+ // These are now still in TanEyeAngle space.
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ ovv_TexCoordR = Flattened * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset;
+ ovv_TexCoordR.y = 1.0-ovv_TexCoordR.y;
+}
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp_chroma.vp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp_chroma.vp
new file mode 100644
index 000000000..e2a45e430
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp_chroma.vp
@@ -0,0 +1,65 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 ovr_EyeToSourceUVScale;
+uniform vec2 ovr_EyeToSourceUVOffset;
+uniform mat4 ovr_EyeRotationStart;
+uniform mat4 ovr_EyeRotationEnd;
+
+attribute vec2 ovr_Position;
+attribute vec2 ovr_Params;
+attribute vec2 ovr_TexCoordR;
+attribute vec2 ovr_TexCoordG;
+attribute vec2 ovr_TexCoordB;
+
+varying vec3 ovv_Fade;
+varying vec2 ovv_TexCoordR;
+varying vec2 ovv_TexCoordG;
+varying vec2 ovv_TexCoordB;
+
+void main(void)
+{
+ gl_Position = vec4(ovr_Position.xy, 0.0, 1.0);
+ ovv_Fade = vec3(ovr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD.
+ vec3 TanEyeAngleR = vec3 ( ovr_TexCoordR, 1.0 );
+ vec3 TanEyeAngleG = vec3 ( ovr_TexCoordG, 1.0 );
+ vec3 TanEyeAngleB = vec3 ( ovr_TexCoordB, 1.0 );
+
+ // Accurate time warp lerp vs. faster
+ // Apply the two 3x3 timewarp rotations to these vectors.
+ vec3 TransformedRStart = (ovr_EyeRotationStart * vec4(TanEyeAngleR, 0)).xyz;
+ vec3 TransformedGStart = (ovr_EyeRotationStart * vec4(TanEyeAngleG, 0)).xyz;
+ vec3 TransformedBStart = (ovr_EyeRotationStart * vec4(TanEyeAngleB, 0)).xyz;
+ vec3 TransformedREnd = (ovr_EyeRotationEnd * vec4(TanEyeAngleR, 0)).xyz;
+ vec3 TransformedGEnd = (ovr_EyeRotationEnd * vec4(TanEyeAngleG, 0)).xyz;
+ vec3 TransformedBEnd = (ovr_EyeRotationEnd * vec4(TanEyeAngleB, 0)).xyz;
+
+ // And blend between them.
+ vec3 TransformedR = mix ( TransformedRStart, TransformedREnd, ovr_Params.g /* timewarpLerpFactor */ );
+ vec3 TransformedG = mix ( TransformedGStart, TransformedGEnd, ovr_Params.g /* timewarpLerpFactor */ );
+ vec3 TransformedB = mix ( TransformedBStart, TransformedBEnd, ovr_Params.g /* timewarpLerpFactor */ );
+
+ // Project them back onto the Z=1 plane of the rendered images.
+ float RecipZR = 1.0 / TransformedR.z;
+ float RecipZG = 1.0 / TransformedG.z;
+ float RecipZB = 1.0 / TransformedB.z;
+ vec2 FlattenedR = vec2 ( TransformedR.x * RecipZR, TransformedR.y * RecipZR );
+ vec2 FlattenedG = vec2 ( TransformedG.x * RecipZG, TransformedG.y * RecipZG );
+ vec2 FlattenedB = vec2 ( TransformedB.x * RecipZB, TransformedB.y * RecipZB );
+
+ // These are now still in TanEyeAngle space.
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ ovv_TexCoordR = FlattenedR * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset;
+ ovv_TexCoordR.y = 1.0-ovv_TexCoordR.y;
+ ovv_TexCoordG = FlattenedG * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset;
+ ovv_TexCoordG.y = 1.0-ovv_TexCoordG.y;
+ ovv_TexCoordB = FlattenedB * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset;
+ ovv_TexCoordB.y = 1.0-ovv_TexCoordB.y;
+}