Diffstat (limited to 'src/oculusvr/classes/jogamp/opengl')
10 files changed, 1172 insertions, 0 deletions
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java new file mode 100644 index 000000000..09a348c46 --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java @@ -0,0 +1,158 @@ +/** + * Copyright 2014 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. 
+ */ +package jogamp.opengl.oculusvr; + +import javax.media.nativewindow.util.Dimension; +import javax.media.nativewindow.util.DimensionImmutable; +import javax.media.nativewindow.util.PointImmutable; +import javax.media.nativewindow.util.Rectangle; +import javax.media.nativewindow.util.RectangleImmutable; + +import com.jogamp.oculusvr.OVR; +import com.jogamp.oculusvr.OvrHmdContext; +import com.jogamp.oculusvr.ovrEyeRenderDesc; +import com.jogamp.oculusvr.ovrFovPort; +import com.jogamp.oculusvr.ovrHmdDesc; +import com.jogamp.oculusvr.ovrSizei; +import com.jogamp.opengl.math.FovHVHalves; +import com.jogamp.opengl.util.stereo.StereoDevice; +import com.jogamp.opengl.util.stereo.StereoDeviceRenderer; + +public class OVRStereoDevice implements StereoDevice { + public final OvrHmdContext handle; + public final int deviceIndex; + public final ovrHmdDesc hmdDesc; + + private boolean sensorsStarted = false; + + public OVRStereoDevice(final OvrHmdContext nativeContext, final int deviceIndex) { + this.handle = nativeContext; + this.deviceIndex = deviceIndex; + this.hmdDesc = ovrHmdDesc.create(); + OVR.ovrHmd_GetDesc(handle, hmdDesc); + } + + @Override + public final void dispose() { + // NOP + } + + @Override + public final PointImmutable getPosition() { + return OVRUtil.getVec2iAsPoint(hmdDesc.getWindowsPos()); + } + + @Override + public final DimensionImmutable getSurfaceSize() { + return OVRUtil.getOVRSizei(hmdDesc.getResolution()); + } + + @Override + public final FovHVHalves[] getDefaultFOV() { + final ovrFovPort[] defaultEyeFov = hmdDesc.getDefaultEyeFov(0, new ovrFovPort[2]); + final FovHVHalves[] eyeFov = new FovHVHalves[2]; + eyeFov[0] = OVRUtil.getFovHV(defaultEyeFov[0]); + eyeFov[1] = OVRUtil.getFovHV(defaultEyeFov[1]); + return eyeFov; + } + + @Override + public final boolean startSensors(final boolean start) { + if( start && !sensorsStarted ) { + // Start the sensor which provides the Rift’s pose and motion. 
+ final int requiredSensorCaps = 0; + final int supportedSensorCaps = requiredSensorCaps | OVR.ovrSensorCap_Orientation | OVR.ovrSensorCap_YawCorrection | OVR.ovrSensorCap_Position; + if( OVR.ovrHmd_StartSensor(handle, supportedSensorCaps, requiredSensorCaps) ) { + sensorsStarted = true; + return true; + } else { + sensorsStarted = false; + return false; + } + } else if( sensorsStarted ) { + OVR.ovrHmd_StopSensor(handle); + sensorsStarted = false; + return true; + } else { + // No state change -> Success + return true; + } + } + @Override + public boolean getSensorsStarted() { return sensorsStarted; } + + @Override + public final StereoDeviceRenderer createRenderer(final int distortionBits, + final int textureCount, final float[] eyePositionOffset, + final FovHVHalves[] eyeFov, final float pixelsPerDisplayPixel, final int textureUnit) { + final ovrFovPort ovrEyeFov0 = OVRUtil.getOVRFovPort(eyeFov[0]); + final ovrFovPort ovrEyeFov1 = OVRUtil.getOVRFovPort(eyeFov[1]); + + final ovrEyeRenderDesc[] eyeRenderDesc = new ovrEyeRenderDesc[2]; + eyeRenderDesc[0] = OVR.ovrHmd_GetRenderDesc(handle, OVR.ovrEye_Left, ovrEyeFov0); + eyeRenderDesc[1] = OVR.ovrHmd_GetRenderDesc(handle, OVR.ovrEye_Right, ovrEyeFov1); + if( StereoDevice.DEBUG ) { + System.err.println("XXX: eyeRenderDesc[0] "+OVRUtil.toString(eyeRenderDesc[0])); + System.err.println("XXX: eyeRenderDesc[1] "+OVRUtil.toString(eyeRenderDesc[1])); + } + + final ovrSizei recommenedTex0Size = OVR.ovrHmd_GetFovTextureSize(handle, OVR.ovrEye_Left, eyeRenderDesc[0].getFov(), pixelsPerDisplayPixel); + final ovrSizei recommenedTex1Size = OVR.ovrHmd_GetFovTextureSize(handle, OVR.ovrEye_Right, eyeRenderDesc[1].getFov(), pixelsPerDisplayPixel); + if( StereoDevice.DEBUG ) { + System.err.println("XXX: recommenedTex0Size "+OVRUtil.toString(recommenedTex0Size)); + System.err.println("XXX: recommenedTex1Size "+OVRUtil.toString(recommenedTex1Size)); + } + final int unifiedW = Math.max(recommenedTex0Size.getW(), recommenedTex1Size.getW()); + final int unifiedH = Math.max(recommenedTex0Size.getH(), recommenedTex1Size.getH()); + + final DimensionImmutable singleTextureSize = new Dimension(unifiedW, unifiedH); + final DimensionImmutable totalTextureSize = new Dimension(recommenedTex0Size.getW() + recommenedTex1Size.getW(), unifiedH); + if( StereoDevice.DEBUG ) { + System.err.println("XXX: textureSize Single "+singleTextureSize); + System.err.println("XXX: textureSize Total "+totalTextureSize); + } + + final RectangleImmutable[] eyeRenderViewports = new RectangleImmutable[2]; + if( 1 == textureCount ) { // validated in ctor below! + eyeRenderViewports[0] = new Rectangle(0, 0, + totalTextureSize.getWidth() / 2, + totalTextureSize.getHeight()); + + eyeRenderViewports[1] = new Rectangle((totalTextureSize.getWidth() + 1) / 2, 0, + totalTextureSize.getWidth() / 2, + totalTextureSize.getHeight()); + } else { + eyeRenderViewports[0] = new Rectangle(0, 0, + singleTextureSize.getWidth(), + singleTextureSize.getHeight()); + eyeRenderViewports[1] = eyeRenderViewports[0]; + } + return new OVRStereoDeviceRenderer(this, distortionBits, textureCount, eyePositionOffset, + eyeRenderDesc, singleTextureSize, totalTextureSize, eyeRenderViewports, textureUnit); + } +}
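For context, a minimal usage sketch of the device API added above (illustrative only, not part of the patch): it assumes an HMD at index 0, a zero eye-position offset, the single side-by-side texture mode, and only the DISTORTION_BARREL flag, which the renderer forces on in any case.

    // Illustrative sketch: obtain a device and build its distortion renderer.
    if( OVRStereoDeviceFactory.isAvailable() ) {                    // ovr_Initialize()
        final OVRStereoDeviceFactory factory = new OVRStereoDeviceFactory();
        final StereoDevice device = factory.createDevice(0, true /* verbose */);
        device.startSensors(true);                                  // ovrHmd_StartSensor
        final StereoDeviceRenderer renderer =
            device.createRenderer(StereoDeviceRenderer.DISTORTION_BARREL,
                                  1                        /* textureCount: one side-by-side texture */,
                                  new float[] { 0f, 0f, 0f } /* eyePositionOffset */,
                                  device.getDefaultFOV(),
                                  1.0f                     /* pixelsPerDisplayPixel */,
                                  0                        /* textureUnit */);
    }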
\ No newline at end of file diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java new file mode 100644 index 000000000..06454e443 --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java @@ -0,0 +1,51 @@ +/** + * Copyright 2014 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ +package jogamp.opengl.oculusvr; + +import com.jogamp.oculusvr.OVR; +import com.jogamp.oculusvr.OVRVersion; +import com.jogamp.oculusvr.OvrHmdContext; +import com.jogamp.opengl.util.stereo.StereoDevice; +import com.jogamp.opengl.util.stereo.StereoDeviceFactory; + +public class OVRStereoDeviceFactory extends StereoDeviceFactory { + + public static boolean isAvailable() { + return OVR.ovr_Initialize(); // recursive .. + } + + @Override + public final StereoDevice createDevice(final int deviceIndex, final boolean verbose) { + final OvrHmdContext hmdCtx = OVR.ovrHmd_Create(deviceIndex); + final OVRStereoDevice ctx = new OVRStereoDevice(hmdCtx, deviceIndex); + if( verbose ) { + System.err.println(OVRVersion.getAvailableCapabilitiesInfo(ctx.hmdDesc, deviceIndex, null).toString()); + } + return ctx; + } +} diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java new file mode 100644 index 000000000..012ad183d --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java @@ -0,0 +1,590 @@ +/** + * Copyright 2014 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ +package jogamp.opengl.oculusvr; + +import java.nio.FloatBuffer; +import java.nio.ShortBuffer; + +import javax.media.nativewindow.util.DimensionImmutable; +import javax.media.nativewindow.util.RectangleImmutable; +import javax.media.opengl.GL; +import javax.media.opengl.GL2ES2; +import javax.media.opengl.GLArrayData; +import javax.media.opengl.GLException; +import javax.media.opengl.GLUniformData; + +import jogamp.common.os.PlatformPropsImpl; + +import com.jogamp.common.nio.Buffers; +import com.jogamp.oculusvr.OVR; +import com.jogamp.oculusvr.OVRException; +import com.jogamp.oculusvr.OvrHmdContext; +import com.jogamp.oculusvr.ovrDistortionMesh; +import com.jogamp.oculusvr.ovrDistortionVertex; +import com.jogamp.oculusvr.ovrEyeRenderDesc; +import com.jogamp.oculusvr.ovrFovPort; +import com.jogamp.oculusvr.ovrFrameTiming; +import com.jogamp.oculusvr.ovrMatrix4f; +import com.jogamp.oculusvr.ovrPosef; +import com.jogamp.oculusvr.ovrRecti; +import com.jogamp.oculusvr.ovrSizei; +import com.jogamp.oculusvr.ovrVector2f; +import com.jogamp.oculusvr.ovrVector3f; +import com.jogamp.opengl.JoglVersion; +import com.jogamp.opengl.math.FloatUtil; +import com.jogamp.opengl.util.GLArrayDataServer; +import com.jogamp.opengl.util.glsl.ShaderCode; +import com.jogamp.opengl.util.glsl.ShaderProgram; +import com.jogamp.opengl.util.stereo.EyeParameter; +import com.jogamp.opengl.util.stereo.EyePose; +import com.jogamp.opengl.util.stereo.StereoDevice; +import com.jogamp.opengl.util.stereo.StereoDeviceRenderer; +import com.jogamp.opengl.util.stereo.StereoUtil; + +/** + * OculusVR Distortion Data and OpenGL Renderer Utility + */ +public class OVRStereoDeviceRenderer implements StereoDeviceRenderer { + private static final String shaderPrefix01 = "dist01"; + private static final String shaderTimewarpSuffix = "_timewarp"; + private static final String shaderChromaSuffix = "_chroma"; + private static final String shaderPlainSuffix = "_plain"; + + public static class OVREye implements StereoDeviceRenderer.Eye { + private final int eyeName; + private final int distortionBits; + private final int vertexCount; + private final int indexCount; + private final RectangleImmutable viewport; + + private final GLUniformData eyeToSourceUVScale; + private final GLUniformData eyeToSourceUVOffset; + private final GLUniformData eyeRotationStart; 
+ private final GLUniformData eyeRotationEnd; + + /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */ + private final GLArrayDataServer iVBO; + private final GLArrayData vboPos, vboParams, vboTexCoordsR, vboTexCoordsG, vboTexCoordsB; + private final GLArrayDataServer indices; + + private final ovrEyeRenderDesc ovrEyeDesc; + private final ovrFovPort ovrEyeFov; + private final EyeParameter eyeParameter; + + private ovrPosef ovrEyePose; + private final EyePose eyePose; + + @Override + public final RectangleImmutable getViewport() { return viewport; } + + @Override + public final EyeParameter getEyeParameter() { return eyeParameter; } + + @Override + public final EyePose getLastEyePose() { return eyePose; } + + private OVREye(final OvrHmdContext hmdCtx, final int distortionBits, + final float[] eyePositionOffset, final ovrEyeRenderDesc eyeDesc, + final ovrSizei ovrTextureSize, final RectangleImmutable eyeViewport) { + this.eyeName = eyeDesc.getEye(); + this.distortionBits = distortionBits; + this.viewport = eyeViewport; + + final boolean usesTimewarp = StereoUtil.usesTimewarpDistortion(distortionBits); + final FloatBuffer fstash = Buffers.newDirectFloatBuffer( 2 + 2 + ( usesTimewarp ? 16 + 16 : 0 ) ) ; + + eyeToSourceUVScale = new GLUniformData("ovr_EyeToSourceUVScale", 2, Buffers.slice2Float(fstash, 0, 2)); + eyeToSourceUVOffset = new GLUniformData("ovr_EyeToSourceUVOffset", 2, Buffers.slice2Float(fstash, 2, 2)); + + if( usesTimewarp ) { + eyeRotationStart = new GLUniformData("ovr_EyeRotationStart", 4, 4, Buffers.slice2Float(fstash, 4, 16)); + eyeRotationEnd = new GLUniformData("ovr_EyeRotationEnd", 4, 4, Buffers.slice2Float(fstash, 20, 16)); + } else { + eyeRotationStart = null; + eyeRotationEnd = null; + } + + this.ovrEyeDesc = eyeDesc; + this.ovrEyeFov = eyeDesc.getFov(); + + final ovrVector3f eyeViewAdjust = eyeDesc.getViewAdjust(); + this.eyeParameter = new EyeParameter(eyeName, eyePositionOffset, OVRUtil.getFovHV(ovrEyeFov), + eyeViewAdjust.getX(), eyeViewAdjust.getY(), eyeViewAdjust.getZ()); + + this.eyePose = new EyePose(eyeName); + + updateEyePose(hmdCtx); // 1st init + + final ovrDistortionMesh meshData = ovrDistortionMesh.create(); + final ovrFovPort fov = eyeDesc.getFov(); + + final int ovrDistortionCaps = distBits2OVRDistCaps(distortionBits); + if( !OVR.ovrHmd_CreateDistortionMesh(hmdCtx, eyeName, fov, ovrDistortionCaps, meshData) ) { + throw new OVRException("Failed to create meshData for eye "+eyeName+", "+OVRUtil.toString(fov)+" and "+StereoUtil.distortionBitsToString(distortionBits)); + } + vertexCount = meshData.getVertexCount(); + indexCount = meshData.getIndexCount(); + + /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */ + final boolean useChromatic = StereoUtil.usesChromaticDistortion(distortionBits); + final boolean useVignette = StereoUtil.usesVignetteDistortion(distortionBits); + + final int compsPerElement = 2+2+2+( useChromatic ? 
2+2 /* texCoordG + texCoordB */: 0 ); + iVBO = GLArrayDataServer.createGLSLInterleaved(compsPerElement, GL.GL_FLOAT, false, vertexCount, GL.GL_STATIC_DRAW); + vboPos = iVBO.addGLSLSubArray("ovr_Position", 2, GL.GL_ARRAY_BUFFER); + vboParams = iVBO.addGLSLSubArray("ovr_Params", 2, GL.GL_ARRAY_BUFFER); + vboTexCoordsR = iVBO.addGLSLSubArray("ovr_TexCoordR", 2, GL.GL_ARRAY_BUFFER); + if( useChromatic ) { + vboTexCoordsG = iVBO.addGLSLSubArray("ovr_TexCoordG", 2, GL.GL_ARRAY_BUFFER); + vboTexCoordsB = iVBO.addGLSLSubArray("ovr_TexCoordB", 2, GL.GL_ARRAY_BUFFER); + } else { + vboTexCoordsG = null; + vboTexCoordsB = null; + } + indices = GLArrayDataServer.createData(1, GL.GL_SHORT, indexCount, GL.GL_STATIC_DRAW, GL.GL_ELEMENT_ARRAY_BUFFER); + + // Setup: eyeToSourceUVScale, eyeToSourceUVOffset + { + final ovrVector2f[] uvScaleOffsetOut = new ovrVector2f[2]; + uvScaleOffsetOut[0] = ovrVector2f.create(); // FIXME: remove ctor / double check + uvScaleOffsetOut[1] = ovrVector2f.create(); + + final ovrRecti ovrEyeRenderViewport = OVRUtil.createOVRRecti(eyeViewport); + OVR.ovrHmd_GetRenderScaleAndOffset(fov, ovrTextureSize, ovrEyeRenderViewport, uvScaleOffsetOut); + if( StereoDevice.DEBUG ) { + System.err.println("XXX."+eyeName+": fov "+OVRUtil.toString(fov)); + System.err.println("XXX."+eyeName+": uvScale "+OVRUtil.toString(uvScaleOffsetOut[0])); + System.err.println("XXX."+eyeName+": uvOffset "+OVRUtil.toString(uvScaleOffsetOut[1])); + System.err.println("XXX."+eyeName+": textureSize "+OVRUtil.toString(ovrTextureSize)); + System.err.println("XXX."+eyeName+": viewport "+OVRUtil.toString(ovrEyeRenderViewport)); + } + final FloatBuffer eyeToSourceUVScaleFB = eyeToSourceUVScale.floatBufferValue(); + eyeToSourceUVScaleFB.put(0, uvScaleOffsetOut[0].getX()); + eyeToSourceUVScaleFB.put(1, uvScaleOffsetOut[0].getY()); + final FloatBuffer eyeToSourceUVOffsetFB = eyeToSourceUVOffset.floatBufferValue(); + eyeToSourceUVOffsetFB.put(0, uvScaleOffsetOut[1].getX()); + eyeToSourceUVOffsetFB.put(1, uvScaleOffsetOut[1].getY()); + } + + /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */ + final FloatBuffer iVBOFB = (FloatBuffer)iVBO.getBuffer(); + final ovrDistortionVertex[] ovRes = new ovrDistortionVertex[1]; + ovRes[0] = ovrDistortionVertex.create(); // FIXME: remove ctor / double check + + for ( int vertNum = 0; vertNum < vertexCount; vertNum++ ) { + final ovrDistortionVertex ov = meshData.getPVertexData(vertNum, ovRes)[0]; + ovrVector2f v; + + // pos + v = ov.getPos(); + iVBOFB.put(v.getX()); + iVBOFB.put(v.getY()); + + // params + if( useVignette ) { + iVBOFB.put(ov.getVignetteFactor()); + } else { + iVBOFB.put(1.0f); + } + iVBOFB.put(ov.getTimeWarpFactor()); + + // texCoordR + v = ov.getTexR(); + iVBOFB.put(v.getX()); + iVBOFB.put(v.getY()); + + if( useChromatic ) { + // texCoordG + v = ov.getTexG(); + iVBOFB.put(v.getX()); + iVBOFB.put(v.getY()); + + // texCoordB + v = ov.getTexB(); + iVBOFB.put(v.getX()); + iVBOFB.put(v.getY()); + } + } + if( StereoDevice.DEBUG ) { + System.err.println("XXX."+eyeName+": iVBO "+iVBO); + } + { + final ShortBuffer in = meshData.getPIndexData(); + final ShortBuffer out = (ShortBuffer) indices.getBuffer(); + out.put(in); + } + if( StereoDevice.DEBUG ) { + System.err.println("XXX."+eyeName+": idx "+indices); + System.err.println("XXX."+eyeName+": "+this); + } + OVR.ovrHmd_DestroyDistortionMesh(meshData); + } + + private void linkData(final GL2ES2 gl, final ShaderProgram sp) { + if( 0 > vboPos.setLocation(gl, sp.program()) ) { + throw new 
OVRException("Couldn't locate "+vboPos); + } + if( 0 > vboParams.setLocation(gl, sp.program()) ) { + throw new OVRException("Couldn't locate "+vboParams); + } + if( 0 > vboTexCoordsR.setLocation(gl, sp.program()) ) { + throw new OVRException("Couldn't locate "+vboTexCoordsR); + } + if( StereoUtil.usesChromaticDistortion(distortionBits) ) { + if( 0 > vboTexCoordsG.setLocation(gl, sp.program()) ) { + throw new OVRException("Couldn't locate "+vboTexCoordsG); + } + if( 0 > vboTexCoordsB.setLocation(gl, sp.program()) ) { + throw new OVRException("Couldn't locate "+vboTexCoordsB); + } + } + if( 0 > eyeToSourceUVScale.setLocation(gl, sp.program()) ) { + throw new OVRException("Couldn't locate "+eyeToSourceUVScale); + } + if( 0 > eyeToSourceUVOffset.setLocation(gl, sp.program()) ) { + throw new OVRException("Couldn't locate "+eyeToSourceUVOffset); + } + if( StereoUtil.usesTimewarpDistortion(distortionBits) ) { + if( 0 > eyeRotationStart.setLocation(gl, sp.program()) ) { + throw new OVRException("Couldn't locate "+eyeRotationStart); + } + if( 0 > eyeRotationEnd.setLocation(gl, sp.program()) ) { + throw new OVRException("Couldn't locate "+eyeRotationEnd); + } + } + iVBO.seal(gl, true); + iVBO.enableBuffer(gl, false); + indices.seal(gl, true); + indices.enableBuffer(gl, false); + } + + private void dispose(final GL2ES2 gl) { + iVBO.destroy(gl); + indices.destroy(gl); + } + private void enableVBO(final GL2ES2 gl, final boolean enable) { + iVBO.enableBuffer(gl, enable); + indices.bindBuffer(gl, enable); // keeps VBO binding if enable:=true + } + + private void updateUniform(final GL2ES2 gl, final ShaderProgram sp) { + gl.glUniform(eyeToSourceUVScale); + gl.glUniform(eyeToSourceUVOffset); + if( StereoUtil.usesTimewarpDistortion(distortionBits) ) { + gl.glUniform(eyeRotationStart); + gl.glUniform(eyeRotationEnd); + } + } + + private void updateTimewarp(final OvrHmdContext hmdCtx, final ovrPosef eyeRenderPose, final float[] mat4Tmp1, final float[] mat4Tmp2) { + final ovrMatrix4f[] timeWarpMatrices = new ovrMatrix4f[2]; + timeWarpMatrices[0] = ovrMatrix4f.create(); // FIXME: remove ctor / double check + timeWarpMatrices[1] = ovrMatrix4f.create(); + OVR.ovrHmd_GetEyeTimewarpMatrices(hmdCtx, eyeName, eyeRenderPose, timeWarpMatrices); + + final float[] eyeRotationStartM = FloatUtil.transposeMatrix(timeWarpMatrices[0].getM(0, mat4Tmp1), mat4Tmp2); + final FloatBuffer eyeRotationStartU = eyeRotationStart.floatBufferValue(); + eyeRotationStartU.put(eyeRotationStartM); + eyeRotationStartU.rewind(); + + final float[] eyeRotationEndM = FloatUtil.transposeMatrix(timeWarpMatrices[1].getM(0, mat4Tmp1), mat4Tmp2); + final FloatBuffer eyeRotationEndU = eyeRotationEnd.floatBufferValue(); + eyeRotationEndU.put(eyeRotationEndM); + eyeRotationEndU.rewind(); + } + + /** + * Updates {@link #ovrEyePose} and it's extracted + * {@link #eyeRenderPoseOrientation} and {@link #eyeRenderPosePosition}. 
+ * @param hmdCtx used get the {@link #ovrEyePose} via {@link OVR#ovrHmd_GetEyePose(OvrHmdContext, int)} + */ + private EyePose updateEyePose(final OvrHmdContext hmdCtx) { + ovrEyePose = OVR.ovrHmd_GetEyePose(hmdCtx, eyeName); + final ovrVector3f pos = ovrEyePose.getPosition(); + eyePose.setPosition(pos.getX(), pos.getY(), pos.getZ()); + OVRUtil.copyToQuaternion(ovrEyePose.getOrientation(), eyePose.orientation); + return eyePose; + } + + @Override + public String toString() { + return "Eye["+eyeName+", viewport "+viewport+ + ", "+eyeParameter+ + ", vertices "+vertexCount+", indices "+indexCount+ + ", uvScale["+eyeToSourceUVScale.floatBufferValue().get(0)+", "+eyeToSourceUVScale.floatBufferValue().get(1)+ + "], uvOffset["+eyeToSourceUVOffset.floatBufferValue().get(0)+", "+eyeToSourceUVOffset.floatBufferValue().get(1)+ + "], desc"+OVRUtil.toString(ovrEyeDesc)+", "+eyePose+"]"; + } + } + + private final OVRStereoDevice context; + private final OVREye[] eyes; + private final int distortionBits; + private final int textureCount; + private final DimensionImmutable singleTextureSize; + private final DimensionImmutable totalTextureSize; + private final GLUniformData texUnit0; + + + private final float[] mat4Tmp1 = new float[16]; + private final float[] mat4Tmp2 = new float[16]; + + private ShaderProgram sp; + private ovrFrameTiming frameTiming; + + @Override + public String toString() { + return "OVRDist[distortion["+StereoUtil.distortionBitsToString(distortionBits)+ + "], singleSize "+singleTextureSize+ + ", sbsSize "+totalTextureSize+ + ", texCount "+textureCount+", texUnit "+getTextureUnit()+ + ", "+PlatformPropsImpl.NEWLINE+" "+eyes[0]+", "+PlatformPropsImpl.NEWLINE+" "+eyes[1]+"]"; + } + + + private static int distBits2OVRDistCaps(final int distortionBits) { + int bits = 0; + if( StereoUtil.usesTimewarpDistortion(distortionBits) ) { + bits |= OVR.ovrDistortionCap_TimeWarp; + } + if( StereoUtil.usesChromaticDistortion(distortionBits) ) { + bits |= OVR.ovrDistortionCap_Chromatic; + } + if( StereoUtil.usesVignetteDistortion(distortionBits) ) { + bits |= OVR.ovrDistortionCap_Vignette; + } + return bits; + } + + /* pp */ OVRStereoDeviceRenderer(final OVRStereoDevice context, final int distortionBits, + final int textureCount, final float[] eyePositionOffset, + final ovrEyeRenderDesc[] eyeRenderDescs, final DimensionImmutable singleTextureSize, final DimensionImmutable totalTextureSize, + final RectangleImmutable[] eyeViewports, final int textureUnit) { + if( 1 > textureCount || 2 < textureCount ) { + throw new IllegalArgumentException("textureCount can only be 1 or 2, has "+textureCount); + } + this.context = context; + this.eyes = new OVREye[2]; + this.distortionBits = distortionBits | StereoDeviceRenderer.DISTORTION_BARREL /* always */; + this.textureCount = textureCount; + this.singleTextureSize = singleTextureSize; + this.totalTextureSize = totalTextureSize; + + texUnit0 = new GLUniformData("ovr_Texture0", textureUnit); + + final ovrSizei ovrTextureSize = OVRUtil.createOVRSizei( 1 == textureCount ? 
totalTextureSize : singleTextureSize ); + eyes[0] = new OVREye(context.handle, this.distortionBits, eyePositionOffset, eyeRenderDescs[0], ovrTextureSize, eyeViewports[0]); + eyes[1] = new OVREye(context.handle, this.distortionBits, eyePositionOffset, eyeRenderDescs[1], ovrTextureSize, eyeViewports[1]); + sp = null; + frameTiming = null; + } + + @Override + public StereoDevice getDevice() { + return context; + } + + @Override + public final int getDistortionBits() { return distortionBits; } + + @Override + public final boolean usesSideBySideStereo() { return true; } + + @Override + public final DimensionImmutable getSingleSurfaceSize() { return singleTextureSize; } + + @Override + public final DimensionImmutable getTotalSurfaceSize() { return totalTextureSize; } + + @Override + public final int getTextureCount() { return textureCount; } + + @Override + public final int getTextureUnit() { return texUnit0.intValue(); } + + @Override + public final boolean ppRequired() { return true; } + + @Override + public final void init(final GL gl) { + if( StereoDevice.DEBUG ) { + System.err.println(JoglVersion.getGLInfo(gl, null).toString()); + } + if( null != sp ) { + throw new IllegalStateException("Already initialized"); + } + final GL2ES2 gl2es2 = gl.getGL2ES2(); + + final String vertexShaderBasename; + final String fragmentShaderBasename; + { + final boolean usesTimewarp = StereoUtil.usesTimewarpDistortion(distortionBits); + final boolean usesChromatic = StereoUtil.usesChromaticDistortion(distortionBits); + + final StringBuilder sb = new StringBuilder(); + sb.append(shaderPrefix01); + if( !usesChromatic && !usesTimewarp ) { + sb.append(shaderPlainSuffix); + } else if( usesChromatic && !usesTimewarp ) { + sb.append(shaderChromaSuffix); + } else if( usesTimewarp ) { + sb.append(shaderTimewarpSuffix); + if( usesChromatic ) { + sb.append(shaderChromaSuffix); + } + } + vertexShaderBasename = sb.toString(); + sb.setLength(0); + sb.append(shaderPrefix01); + if( usesChromatic ) { + sb.append(shaderChromaSuffix); + } else { + sb.append(shaderPlainSuffix); + } + fragmentShaderBasename = sb.toString(); + } + final ShaderCode vp0 = ShaderCode.create(gl2es2, GL2ES2.GL_VERTEX_SHADER, OVRStereoDeviceRenderer.class, "shader", + "shader/bin", vertexShaderBasename, true); + final ShaderCode fp0 = ShaderCode.create(gl2es2, GL2ES2.GL_FRAGMENT_SHADER, OVRStereoDeviceRenderer.class, "shader", + "shader/bin", fragmentShaderBasename, true); + vp0.defaultShaderCustomization(gl2es2, true, true); + fp0.defaultShaderCustomization(gl2es2, true, true); + + sp = new ShaderProgram(); + sp.add(gl2es2, vp0, System.err); + sp.add(gl2es2, fp0, System.err); + if(!sp.link(gl2es2, System.err)) { + throw new GLException("could not link program: "+sp); + } + sp.useProgram(gl2es2, true); + if( 0 > texUnit0.setLocation(gl2es2, sp.program()) ) { + throw new OVRException("Couldn't locate "+texUnit0); + } + eyes[0].linkData(gl2es2, sp); + eyes[1].linkData(gl2es2, sp); + sp.useProgram(gl2es2, false); + } + + @Override + public final void dispose(final GL gl) { + final GL2ES2 gl2es2 = gl.getGL2ES2(); + sp.useProgram(gl2es2, false); + eyes[0].dispose(gl2es2); + eyes[1].dispose(gl2es2); + sp.destroy(gl2es2); + frameTiming = null; + } + + @Override + public final Eye getEye(final int eyeNum) { + return eyes[eyeNum]; + } + + @Override + public final EyePose updateEyePose(final int eyeNum) { + return eyes[eyeNum].updateEyePose(context.handle); + } + + @Override + public final void beginFrame(final GL gl) { + frameTiming = 
OVR.ovrHmd_BeginFrameTiming(context.handle, 0); + } + + @Override + public final void endFrame(final GL gl) { + if( null == frameTiming ) { + throw new IllegalStateException("beginFrame not called"); + } + OVR.ovrHmd_EndFrameTiming(context.handle); + frameTiming = null; + } + + @Override + public final void ppBegin(final GL gl) { + if( null == sp ) { + throw new IllegalStateException("Not initialized"); + } + if( null == frameTiming ) { + throw new IllegalStateException("beginFrame not called"); + } + if( StereoUtil.usesTimewarpDistortion(distortionBits) ) { + OVR.ovr_WaitTillTime(frameTiming.getTimewarpPointSeconds()); + } + final GL2ES2 gl2es2 = gl.getGL2ES2(); + + gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); + gl.glClear(GL.GL_COLOR_BUFFER_BIT); + gl.glActiveTexture(GL.GL_TEXTURE0 + getTextureUnit()); + + gl2es2.glDisable(GL.GL_CULL_FACE); + gl2es2.glDisable(GL.GL_DEPTH_TEST); + gl2es2.glDisable(GL.GL_BLEND); + + if( !gl2es2.isGLcore() ) { + gl2es2.glEnable(GL.GL_TEXTURE_2D); + } + + sp.useProgram(gl2es2, true); + + gl2es2.glUniform(texUnit0); + } + + @Override + public final void ppBothEyes(final GL gl) { + final GL2ES2 gl2es2 = gl.getGL2ES2(); + for(int eyeNum=0; eyeNum<2; eyeNum++) { + final OVREye eye = eyes[eyeNum]; + if( StereoUtil.usesTimewarpDistortion(distortionBits) ) { + eye.updateTimewarp(context.handle, eye.ovrEyePose, mat4Tmp1, mat4Tmp2); + } + eye.updateUniform(gl2es2, sp); + eye.enableVBO(gl2es2, true); + gl2es2.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0); + eyes[eyeNum].enableVBO(gl2es2, false); + } + } + + @Override + public final void ppOneEye(final GL gl, final int eyeNum) { + final OVREye eye = eyes[eyeNum]; + if( StereoUtil.usesTimewarpDistortion(distortionBits) ) { + eye.updateTimewarp(context.handle, eye.ovrEyePose, mat4Tmp1, mat4Tmp2); + } + final GL2ES2 gl2es2 = gl.getGL2ES2(); + + eye.updateUniform(gl2es2, sp); + eye.enableVBO(gl2es2, true); + gl2es2.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0); + eyes[eyeNum].enableVBO(gl2es2, false); + } + + @Override + public final void ppEnd(final GL gl) { + sp.useProgram(gl.getGL2ES2(), false); + } +} diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java new file mode 100644 index 000000000..48222ea97 --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java @@ -0,0 +1,156 @@ +/** + * Copyright 2014 JogAmp Community. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are + * permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this list of + * conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, this list + * of conditions and the following disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND + * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL JogAmp Community OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF + * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * The views and conclusions contained in the software and documentation are those of the + * authors and should not be interpreted as representing official policies, either expressed + * or implied, of JogAmp Community. + */ +package jogamp.opengl.oculusvr; + +import javax.media.nativewindow.util.Dimension; +import javax.media.nativewindow.util.DimensionImmutable; +import javax.media.nativewindow.util.Point; +import javax.media.nativewindow.util.PointImmutable; +import javax.media.nativewindow.util.RectangleImmutable; + +import com.jogamp.oculusvr.ovrEyeRenderDesc; +import com.jogamp.oculusvr.ovrFovPort; +import com.jogamp.oculusvr.ovrQuatf; +import com.jogamp.oculusvr.ovrRecti; +import com.jogamp.oculusvr.ovrSizei; +import com.jogamp.oculusvr.ovrVector2f; +import com.jogamp.oculusvr.ovrVector2i; +import com.jogamp.oculusvr.ovrVector3f; +import com.jogamp.opengl.math.FovHVHalves; +import com.jogamp.opengl.math.Quaternion; + +/** + * OculusVR Data Conversion Helper Functions + */ +public class OVRUtil { + public static ovrRecti createOVRRecti(final int[] rect) { + final ovrRecti res = ovrRecti.create(); + final ovrVector2i pos = res.getPos(); + final ovrSizei size = res.getSize(); + pos.setX(rect[0]); + pos.setY(rect[1]); + size.setW(rect[2]); + size.setH(rect[3]); + return res; + } + public static ovrRecti createOVRRecti(final RectangleImmutable rect) { + final ovrRecti res = ovrRecti.create(); + final ovrVector2i pos = res.getPos(); + final ovrSizei size = res.getSize(); + pos.setX(rect.getX()); + pos.setY(rect.getY()); + size.setW(rect.getWidth()); + size.setH(rect.getHeight()); + return res; + } + public static ovrRecti[] createOVRRectis(final int[][] rects) { + final ovrRecti[] res = new ovrRecti[rects.length]; + for(int i=0; i<res.length; i++) { + res[0] = createOVRRecti(rects[i]); + } + return res; + } + public static ovrSizei createOVRSizei(final int[] size) { + final ovrSizei res = ovrSizei.create(); + res.setW(size[0]); + res.setH(size[1]); + return res; + } + public static ovrSizei createOVRSizei(final DimensionImmutable size) { + final ovrSizei res = ovrSizei.create(); + res.setW(size.getWidth()); + res.setH(size.getHeight()); + return res; + } + public static DimensionImmutable getOVRSizei(final ovrSizei v) { + return new Dimension(v.getW(), v.getH()); + } + public static PointImmutable getVec2iAsPoint(final ovrVector2i v) { + return new Point(v.getX(), v.getY()); + } + public static int[] getVec2i(final ovrVector2i v) { + return new int[] { v.getX(), v.getY() }; + } + public static void copyVec2iToInt(final ovrVector2i v, final int[] res) { + res[0] = v.getX(); + res[1] = v.getY(); + } + public static float[] getVec3f(final ovrVector3f v) { + return new float[] { v.getX(), v.getY(), v.getZ() }; + } + public static void copyVec3fToFloat(final ovrVector3f v, final float[] res) { + res[0] = v.getX(); + res[1] = v.getY(); + res[2] = v.getZ(); + } + public static Quaternion getQuaternion(final ovrQuatf q) { + return new Quaternion(q.getX(), q.getY(), 
q.getZ(), q.getW()); + } + public static void copyToQuaternion(final ovrQuatf in, final Quaternion out) { + out.set(in.getX(), in.getY(), in.getZ(), in.getW()); + } + + public static FovHVHalves getFovHV(final ovrFovPort tanHalfFov) { + return new FovHVHalves(tanHalfFov.getLeftTan(), tanHalfFov.getRightTan(), + tanHalfFov.getUpTan(), tanHalfFov.getDownTan(), + true); + } + public static ovrFovPort getOVRFovPort(final FovHVHalves fovHVHalves) { + final ovrFovPort tanHalfFov = ovrFovPort.create(); + final FovHVHalves fovHVHalvesTan = fovHVHalves.getInTangents(); + tanHalfFov.setLeftTan(fovHVHalvesTan.left); + tanHalfFov.setRightTan(fovHVHalvesTan.right); + tanHalfFov.setUpTan(fovHVHalvesTan.top); + tanHalfFov.setDownTan(fovHVHalvesTan.bottom); + return tanHalfFov; + } + + public static String toString(final ovrFovPort fov) { + return "["+fov.getLeftTan()+" l, "+fov.getRightTan()+" r, "+ + fov.getUpTan()+" u, "+fov.getDownTan()+" d]"; + } + public static String toString(final ovrSizei rect) { + return "["+rect.getW()+" x "+rect.getH()+"]"; + } + public static String toString(final ovrRecti rect) { + return "["+rect.getPos().getX()+" / "+rect.getPos().getY()+" "+ + rect.getSize().getW()+" x "+rect.getSize().getH()+"]"; + } + public static String toString(final ovrVector2f v2) { + return "["+v2.getX()+", "+v2.getY()+"]"; + } + public static String toString(final ovrVector3f v3) { + return "["+v3.getX()+", "+v3.getY()+", "+v3.getZ()+"]"; + } + public static String toString(final ovrEyeRenderDesc desc) { + return "["+desc.getEye()+", fov"+toString(desc.getFov())+ + ", viewport"+toString(desc.getDistortedViewport())+ + ", pptCtr"+toString(desc.getPixelsPerTanAngleAtCenter())+ + ", view-adjust"+toString(desc.getViewAdjust())+"]"; + } +} diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.fp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.fp new file mode 100644 index 000000000..6d450fe40 --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.fp @@ -0,0 +1,26 @@ +//Copyright 2014 JogAmp Community. All rights reserved. + +#if __VERSION__ >= 130 + #define varying in + out vec4 ovr_FragColor; + #define texture2D texture +#else + #define ovr_FragColor gl_FragColor +#endif + +uniform sampler2D ovr_Texture0; + +varying vec3 ovv_Fade; +varying vec2 ovv_TexCoordR; +varying vec2 ovv_TexCoordG; +varying vec2 ovv_TexCoordB; + +void main (void) +{ + // 3 samples for fixing chromatic aberrations + vec3 color = vec3(texture2D(ovr_Texture0, ovv_TexCoordR).r, + texture2D(ovr_Texture0, ovv_TexCoordG).g, + texture2D(ovr_Texture0, ovv_TexCoordB).b); + ovr_FragColor = vec4(ovv_Fade * color, 1.0); // include vignetteFade +} + diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.vp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.vp new file mode 100644 index 000000000..254d4b081 --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_chroma.vp @@ -0,0 +1,33 @@ +//Copyright 2014 JogAmp Community. All rights reserved. 
+ +#if __VERSION__ >= 130 + #define attribute in + #define varying out +#endif + +uniform vec2 ovr_EyeToSourceUVScale; +uniform vec2 ovr_EyeToSourceUVOffset; + +attribute vec2 ovr_Position; +attribute vec2 ovr_Params; +attribute vec2 ovr_TexCoordR; +attribute vec2 ovr_TexCoordG; +attribute vec2 ovr_TexCoordB; + +varying vec3 ovv_Fade; +varying vec2 ovv_TexCoordR; +varying vec2 ovv_TexCoordG; +varying vec2 ovv_TexCoordB; + +void main(void) +{ + gl_Position = vec4(ovr_Position.xy, 0.5, 1.0); + ovv_Fade = vec3(ovr_Params.r); // vignetteFade + + ovv_TexCoordR = ovr_TexCoordR * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset; + ovv_TexCoordR.y = 1.0-ovv_TexCoordR.y; + ovv_TexCoordG = ovr_TexCoordG * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset; + ovv_TexCoordG.y = 1.0-ovv_TexCoordG.y; + ovv_TexCoordB = ovr_TexCoordB * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset; + ovv_TexCoordB.y = 1.0-ovv_TexCoordB.y; +} diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.fp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.fp new file mode 100644 index 000000000..d3ee5d04d --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.fp @@ -0,0 +1,22 @@ +//Copyright 2014 JogAmp Community. All rights reserved. + +#if __VERSION__ >= 130 + #define varying in + out vec4 ovr_FragColor; + #define texture2D texture +#else + #define ovr_FragColor gl_FragColor +#endif + +uniform sampler2D ovr_Texture0; + +varying vec3 ovv_Fade; +varying vec2 ovv_TexCoordR; + +void main (void) +{ + // 3 samples for fixing chromatic aberrations + vec3 color = texture2D(ovr_Texture0, ovv_TexCoordR).rgb; + ovr_FragColor = vec4(ovv_Fade * color, 1.0); // include vignetteFade +} + diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.vp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.vp new file mode 100644 index 000000000..6456c7a83 --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_plain.vp @@ -0,0 +1,27 @@ +//Copyright 2014 JogAmp Community. All rights reserved. + +#if __VERSION__ >= 130 + #define attribute in + #define varying out +#endif + +uniform vec2 ovr_EyeToSourceUVScale; +uniform vec2 ovr_EyeToSourceUVOffset; + +attribute vec2 ovr_Position; +attribute vec2 ovr_Params; +attribute vec2 ovr_TexCoordR; + +varying vec3 ovv_Fade; +varying vec2 ovv_TexCoordR; + +void main(void) +{ + gl_Position = vec4(ovr_Position.xy, 0.5, 1.0); + ovv_Fade = vec3(ovr_Params.r); // vignetteFade + + // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion). + // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye) + ovv_TexCoordR = ovr_TexCoordR * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset; + ovv_TexCoordR.y = 1.0-ovv_TexCoordR.y; +} diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp.vp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp.vp new file mode 100644 index 000000000..3485625d2 --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp.vp @@ -0,0 +1,44 @@ +//Copyright 2014 JogAmp Community. All rights reserved. 
+ +#if __VERSION__ >= 130 + #define attribute in + #define varying out +#endif + +uniform vec2 ovr_EyeToSourceUVScale; +uniform vec2 ovr_EyeToSourceUVOffset; +uniform mat4 ovr_EyeRotationStart; +uniform mat4 ovr_EyeRotationEnd; + +attribute vec2 ovr_Position; +attribute vec2 ovr_Params; +attribute vec2 ovr_TexCoordR; + +varying vec3 ovv_Fade; +varying vec2 ovv_TexCoordR; + +void main(void) +{ + gl_Position = vec4(ovr_Position.xy, 0.0, 1.0); + ovv_Fade = vec3(ovr_Params.r); // vignetteFade + + // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion). + // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD. + vec3 TanEyeAngle = vec3 ( ovr_TexCoordR, 1.0 ); + + // Accurate time warp lerp vs. faster + // Apply the two 3x3 timewarp rotations to these vectors. + vec3 TransformedStart = (ovr_EyeRotationStart * vec4(TanEyeAngle, 0)).xyz; + vec3 TransformedEnd = (ovr_EyeRotationEnd * vec4(TanEyeAngle, 0)).xyz; + // And blend between them. + vec3 Transformed = mix ( TransformedStart, TransformedEnd, ovr_Params.g /* timewarpLerpFactor */ ); + + // Project them back onto the Z=1 plane of the rendered images. + float RecipZ = 1.0 / Transformed.z; + vec2 Flattened = vec2 ( Transformed.x * RecipZ, Transformed.y * RecipZ ); + + // These are now still in TanEyeAngle space. + // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye) + ovv_TexCoordR = Flattened * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset; + ovv_TexCoordR.y = 1.0-ovv_TexCoordR.y; +} diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp_chroma.vp b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp_chroma.vp new file mode 100644 index 000000000..e2a45e430 --- /dev/null +++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/shader/dist01_timewarp_chroma.vp @@ -0,0 +1,65 @@ +//Copyright 2014 JogAmp Community. All rights reserved. + +#if __VERSION__ >= 130 + #define attribute in + #define varying out +#endif + +uniform vec2 ovr_EyeToSourceUVScale; +uniform vec2 ovr_EyeToSourceUVOffset; +uniform mat4 ovr_EyeRotationStart; +uniform mat4 ovr_EyeRotationEnd; + +attribute vec2 ovr_Position; +attribute vec2 ovr_Params; +attribute vec2 ovr_TexCoordR; +attribute vec2 ovr_TexCoordG; +attribute vec2 ovr_TexCoordB; + +varying vec3 ovv_Fade; +varying vec2 ovv_TexCoordR; +varying vec2 ovv_TexCoordG; +varying vec2 ovv_TexCoordB; + +void main(void) +{ + gl_Position = vec4(ovr_Position.xy, 0.0, 1.0); + ovv_Fade = vec3(ovr_Params.r); // vignetteFade + + // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion). + // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD. + vec3 TanEyeAngleR = vec3 ( ovr_TexCoordR, 1.0 ); + vec3 TanEyeAngleG = vec3 ( ovr_TexCoordG, 1.0 ); + vec3 TanEyeAngleB = vec3 ( ovr_TexCoordB, 1.0 ); + + // Accurate time warp lerp vs. faster + // Apply the two 3x3 timewarp rotations to these vectors. + vec3 TransformedRStart = (ovr_EyeRotationStart * vec4(TanEyeAngleR, 0)).xyz; + vec3 TransformedGStart = (ovr_EyeRotationStart * vec4(TanEyeAngleG, 0)).xyz; + vec3 TransformedBStart = (ovr_EyeRotationStart * vec4(TanEyeAngleB, 0)).xyz; + vec3 TransformedREnd = (ovr_EyeRotationEnd * vec4(TanEyeAngleR, 0)).xyz; + vec3 TransformedGEnd = (ovr_EyeRotationEnd * vec4(TanEyeAngleG, 0)).xyz; + vec3 TransformedBEnd = (ovr_EyeRotationEnd * vec4(TanEyeAngleB, 0)).xyz; + + // And blend between them. 
+ vec3 TransformedR = mix ( TransformedRStart, TransformedREnd, ovr_Params.g /* timewarpLerpFactor */ ); + vec3 TransformedG = mix ( TransformedGStart, TransformedGEnd, ovr_Params.g /* timewarpLerpFactor */ ); + vec3 TransformedB = mix ( TransformedBStart, TransformedBEnd, ovr_Params.g /* timewarpLerpFactor */ ); + + // Project them back onto the Z=1 plane of the rendered images. + float RecipZR = 1.0 / TransformedR.z; + float RecipZG = 1.0 / TransformedG.z; + float RecipZB = 1.0 / TransformedB.z; + vec2 FlattenedR = vec2 ( TransformedR.x * RecipZR, TransformedR.y * RecipZR ); + vec2 FlattenedG = vec2 ( TransformedG.x * RecipZG, TransformedG.y * RecipZG ); + vec2 FlattenedB = vec2 ( TransformedB.x * RecipZB, TransformedB.y * RecipZB ); + + // These are now still in TanEyeAngle space. + // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye) + ovv_TexCoordR = FlattenedR * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset; + ovv_TexCoordR.y = 1.0-ovv_TexCoordR.y; + ovv_TexCoordG = FlattenedG * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset; + ovv_TexCoordG.y = 1.0-ovv_TexCoordG.y; + ovv_TexCoordB = FlattenedB * ovr_EyeToSourceUVScale + ovr_EyeToSourceUVOffset; + ovv_TexCoordB.y = 1.0-ovv_TexCoordB.y; +} |
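Tying the pieces together, a per-frame sketch of how the OVRStereoDeviceRenderer above is meant to be driven (illustrative only, not part of the patch): the caller is assumed to supply the GL context and a renderer created as shown earlier, and to render each eye's view into the shared texture bound at renderer.getTextureUnit(); scene rendering itself is elided.

    // Illustrative sketch: one frame of the distortion post-processing pass.
    void renderFrame(final GL gl, final StereoDeviceRenderer renderer) {
        renderer.beginFrame(gl);                                  // ovrHmd_BeginFrameTiming
        for(int eyeNum = 0; eyeNum < 2; eyeNum++) {
            final EyePose pose = renderer.updateEyePose(eyeNum);  // ovrHmd_GetEyePose
            final StereoDeviceRenderer.Eye eye = renderer.getEye(eyeNum);
            // ... render the scene for this eye into eye.getViewport() of the
            //     side-by-side texture, using 'pose' and eye.getEyeParameter() ...
        }
        renderer.ppBegin(gl);     // clears, binds the dist01_* shader program
        renderer.ppBothEyes(gl);  // draws both distortion meshes; timewarp uniforms updated here
        renderer.ppEnd(gl);
        renderer.endFrame(gl);    // ovrHmd_EndFrameTiming
    }

The renderer's init(gl) must have been called once beforehand to compile and link the dist01_* shaders, and dispose(gl) releases the meshes and program on shutdown.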