authorSven Gothel <[email protected]>2014-07-05 04:04:43 +0200
committerSven Gothel <[email protected]>2014-07-05 04:04:43 +0200
commitf8f0f051604721bceaee214b8e5218fd47d2eb9e (patch)
treee9c3103498984fbc5d2f8567e88009c49e8810f7 /src
parent2293a53ba04a8cf2881e8919f8be97c16a9af336 (diff)
Bug 1021: Make OVR access vendor agnostic: Package 'com.jogamp.opengl.util.stereo' contains all public interfaces/classes
Renamed interfaces:
  CustomRendererListener -> CustomGLEventListener
  StereoRendererListener -> StereoGLEventListener

New vendor-agnostic interfaces/classes in com.jogamp.opengl.util.stereo:

1 - StereoDeviceFactory
    To create a vendor-specific StereoDeviceFactory instance,
    which creates the StereoDevice.

2 - StereoDevice
    For vendor-specific implementation. Can create the StereoDeviceRenderer.

3 - StereoDeviceRenderer
    For vendor-specific implementation.

4 - StereoClientRenderer
    Vendor-agnostic client StereoGLEventListener renderer,
    using a StereoDeviceRenderer.
    Now supports multiple StereoGLEventListener instances, via add/remove.

- MovieSBSStereo, demo-able via StereoDemo01, can show SBS 3D movies.
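Taken together, the new pieces wire up roughly as follows. This is a minimal sketch only, loosely following the StereoDemo01 test added by this commit: the class name, the NEWT window / Animator setup and the commented-out scene listener are illustrative assumptions, while the stereo API calls match the interfaces introduced here.

    import javax.media.nativewindow.util.DimensionImmutable;
    import javax.media.nativewindow.util.PointImmutable;
    import javax.media.opengl.GL;
    import javax.media.opengl.GLCapabilities;
    import javax.media.opengl.GLProfile;

    import com.jogamp.newt.opengl.GLWindow;
    import com.jogamp.opengl.math.FovHVHalves;
    import com.jogamp.opengl.util.Animator;
    import com.jogamp.opengl.util.stereo.StereoClientRenderer;
    import com.jogamp.opengl.util.stereo.StereoDevice;
    import com.jogamp.opengl.util.stereo.StereoDeviceFactory;
    import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;

    public class StereoSetupSketch {
        public static void main(final String[] args) {
            // 1 - vendor-specific factory, discovered via reflection (OVR backend, if available)
            final StereoDeviceFactory factory = StereoDeviceFactory.createDefaultFactory();
            if( null == factory ) {
                System.err.println("No StereoDeviceFactory available");
                return;
            }
            // 2 - vendor-specific device
            final StereoDevice device = factory.createDevice(0 /* deviceIndex */, false /* verbose */);
            final DimensionImmutable surfaceSize = device.getSurfaceSize();
            final PointImmutable devicePos = device.getPosition();
            final FovHVHalves[] eyeFov = device.getDefaultFOV();

            // 3 - vendor-specific device renderer: barrel distortion, one shared texture
            final StereoDeviceRenderer deviceRenderer = device.createRenderer(
                    StereoDeviceRenderer.DISTORTION_BARREL, 1 /* textureCount */,
                    StereoDevice.DEFAULT_EYE_POSITION_OFFSET, eyeFov,
                    1.0f /* pixelsPerDisplayPixel */, 0 /* textureUnit */);

            // 4 - vendor-agnostic client renderer, driving one or more StereoGLEventListener
            final StereoClientRenderer clientRenderer = new StereoClientRenderer(
                    deviceRenderer, true /* ownsDevice */, GL.GL_LINEAR, GL.GL_LINEAR, 0 /* numSamples */);
            // clientRenderer.addGLEventListener(new MySceneListener()); // hypothetical scene listener

            final GLWindow window = GLWindow.create(new GLCapabilities(GLProfile.getDefault()));
            window.setSize(surfaceSize.getWidth(), surfaceSize.getHeight());
            window.setPosition(devicePos.getX(), devicePos.getY());
            window.addGLEventListener(clientRenderer);
            window.setVisible(true);

            device.startSensors(true);
            new Animator(window).start();
        }
    }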
Diffstat (limited to 'src')
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/CustomGLEventListener.java (renamed from src/jogl/classes/com/jogamp/opengl/util/CustomRendererListener.java) | 2
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/stereo/EyeParameter.java | 2
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/stereo/EyePose.java | 2
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java | 255
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java | 93
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java | 64
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java | 235
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/stereo/StereoGLEventListener.java (renamed from src/jogl/classes/com/jogamp/opengl/util/stereo/StereoRendererListener.java) | 13
-rw-r--r-- src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java | 125
-rw-r--r-- src/jogl/classes/jogamp/opengl/GLDrawableHelper.java | 27
-rw-r--r-- src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java | 1
-rw-r--r-- src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererDualFBO.java | 233
-rw-r--r-- src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererSingleFBO.java | 206
-rw-r--r-- src/oculusvr/classes/jogamp/opengl/oculusvr/OVRDistortion.java | 690
-rw-r--r-- src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java | 158
-rw-r--r-- src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java | 51
-rw-r--r-- src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java | 590
-rw-r--r-- src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java | 60
-rw-r--r-- src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/GearsES2.java | 14
-rw-r--r-- src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSBSStereo.java | 859
-rw-r--r-- src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java | 14
-rw-r--r-- src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java | 307
-rw-r--r-- src/test/com/jogamp/opengl/test/junit/jogl/stereo/ovr/OVRDemo01.java | 236
23 files changed, 2843 insertions, 1394 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/util/CustomRendererListener.java b/src/jogl/classes/com/jogamp/opengl/util/CustomGLEventListener.java
index 0e6de5178..86443087e 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/CustomRendererListener.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/CustomGLEventListener.java
@@ -34,7 +34,7 @@ import javax.media.opengl.GLEventListener;
* Extended {@link GLEventListener} interface
* supporting more fine grained control over the implementation.
*/
-public interface CustomRendererListener extends GLEventListener {
+public interface CustomGLEventListener extends GLEventListener {
/**
* {@link #display(GLAutoDrawable, int) display flag}: Repeat last produced image.
* <p>
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/EyeParameter.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/EyeParameter.java
index 7774d67e2..075da340b 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/EyeParameter.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/EyeParameter.java
@@ -62,7 +62,7 @@ public final class EyeParameter {
this.eyeReliefZ = eyeRelief;
}
public final String toString() {
- return "EyeParam[num"+number+", posOff["+positionOffset[0]+", "+positionOffset[1]+", "+positionOffset[2]+"], "+fovhv+
+ return "EyeParam[num "+number+", posOff["+positionOffset[0]+", "+positionOffset[1]+", "+positionOffset[2]+"], "+fovhv+
", distPupil[noseX "+distNoseToPupilX+", middleY "+distMiddleToPupilY+", reliefZ "+eyeReliefZ+"]]";
}
}
\ No newline at end of file
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/EyePose.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/EyePose.java
index 2690097f1..aa64ff130 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/EyePose.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/EyePose.java
@@ -64,6 +64,6 @@ public final class EyePose {
position[2] = posZ;
}
public final String toString() {
- return "EyePose[num"+number+", pos["+position[0]+", "+position[1]+", "+position[2]+"], "+orientation+"]";
+ return "EyePose[num "+number+", pos["+position[0]+", "+position[1]+", "+position[2]+"], "+orientation+"]";
}
}
\ No newline at end of file
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java
new file mode 100644
index 000000000..9f9ebdf2a
--- /dev/null
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java
@@ -0,0 +1,255 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.util.stereo;
+
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.RectangleImmutable;
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES2;
+import javax.media.opengl.GLAutoDrawable;
+import javax.media.opengl.GLEventListener;
+
+import jogamp.opengl.GLDrawableHelper;
+import jogamp.opengl.GLDrawableHelper.GLEventListenerAction;
+
+import com.jogamp.opengl.FBObject;
+import com.jogamp.opengl.FBObject.Attachment;
+import com.jogamp.opengl.FBObject.TextureAttachment;
+import com.jogamp.opengl.FBObject.Attachment.Type;
+import com.jogamp.opengl.util.CustomGLEventListener;
+
+/**
+ * {@link StereoClientRenderer} utilizing {@link StereoDeviceRenderer}
+ * implementing {@link GLEventListener} for convenience.
+ * <p>
+ * Implementation renders {@link StereoGLEventListener}
+ * using one or more {@link FBObject} according to {@link StereoDeviceRenderer#getTextureCount()}.
+ * </p>
+ */
+public class StereoClientRenderer implements GLEventListener {
+ private final GLDrawableHelper helper;
+ private final StereoDeviceRenderer deviceRenderer;
+ private final boolean ownsDevice;
+ private final FBObject[] fbos;
+ private final int magFilter;
+ private final int minFilter;
+ private final boolean usePostprocessing;
+
+ private int numSamples;
+ private final TextureAttachment[] fboTexs;
+
+ public StereoClientRenderer(final StereoDeviceRenderer deviceRenderer, final boolean ownsDevice,
+ final int magFilter, final int minFilter, final int numSamples) {
+ final int fboCount = deviceRenderer.getTextureCount();
+ if( 0 > fboCount || 2 < fboCount ) {
+ throw new IllegalArgumentException("fboCount must be within [0..2], has "+fboCount+", due to "+deviceRenderer);
+ }
+ this.helper = new GLDrawableHelper();
+ this.deviceRenderer = deviceRenderer;
+ this.usePostprocessing = deviceRenderer.ppRequired() || deviceRenderer.usesSideBySideStereo() && fboCount > 1;
+ this.ownsDevice = ownsDevice;
+ this.magFilter = magFilter;
+ this.minFilter = minFilter;
+
+ this.numSamples = numSamples;
+
+ this.fbos = new FBObject[fboCount];
+ for(int i=0; i<fboCount; i++) {
+ this.fbos[i] = new FBObject();
+ }
+ this.fboTexs = new TextureAttachment[fboCount];
+ }
+
+ private void initFBOs(final GL gl, final DimensionImmutable size) {
+ for(int i=0; i<fbos.length; i++) {
+ fbos[i].detachAllColorbuffer(gl);
+ fbos[i].reset(gl, size.getWidth(), size.getHeight(), numSamples, false);
+ if( i>0 && fbos[i-1].getNumSamples() != fbos[i].getNumSamples()) {
+ throw new InternalError("sample size mismatch: \n\t0: "+fbos[i-1]+"\n\t1: "+fbos[i]);
+ }
+ numSamples = fbos[i].getNumSamples();
+
+ if(numSamples>0) {
+ fbos[i].attachColorbuffer(gl, 0, true); // MSAA requires alpha
+ fbos[i].attachRenderbuffer(gl, Type.DEPTH, 24);
+ final FBObject ssink = new FBObject();
+ {
+ ssink.reset(gl, size.getWidth(), size.getHeight());
+ ssink.attachTexture2D(gl, 0, false, magFilter, minFilter, GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE);
+ ssink.attachRenderbuffer(gl, Attachment.Type.DEPTH, 24);
+ }
+ fbos[i].setSamplingSink(ssink);
+ fbos[i].resetSamplingSink(gl); // validate
+ fboTexs[i] = fbos[i].getSamplingSink();
+ } else {
+ fboTexs[i] = fbos[i].attachTexture2D(gl, 0, false, magFilter, minFilter, GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE);
+ fbos[i].attachRenderbuffer(gl, Type.DEPTH, 24);
+ }
+ fbos[i].unbind(gl);
+ System.err.println("FBO["+i+"]: "+fbos[i]);
+ }
+
+ }
+
+ @SuppressWarnings("unused")
+ private void resetFBOs(final GL gl, final DimensionImmutable size) {
+ for(int i=0; i<fbos.length; i++) {
+ fbos[i].reset(gl, size.getWidth(), size.getHeight(), numSamples, true);
+ if( i>0 && fbos[i-1].getNumSamples() != fbos[i].getNumSamples()) {
+ throw new InternalError("sample size mismatch: \n\t0: "+fbos[i-1]+"\n\t1: "+fbos[i]);
+ }
+ numSamples = fbos[i].getNumSamples();
+ if(numSamples>0) {
+ fboTexs[i] = fbos[i].getSamplingSink();
+ } else {
+ fboTexs[i] = (TextureAttachment) fbos[i].getColorbuffer(0);
+ }
+ }
+ }
+
+ public final StereoDeviceRenderer getStereoDeviceRenderer() { return deviceRenderer; }
+
+ public final void addGLEventListener(final StereoGLEventListener l) {
+ helper.addGLEventListener(l);
+ }
+ public final void removeGLEventListener(final StereoGLEventListener l) {
+ helper.removeGLEventListener(l);
+ }
+
+ @Override
+ public void init(final GLAutoDrawable drawable) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ deviceRenderer.init(gl);
+
+ // We will do some offscreen rendering, setup FBO...
+ final DimensionImmutable textureSize = deviceRenderer.getTextureCount() > 1 ? deviceRenderer.getSingleSurfaceSize() : deviceRenderer.getTotalSurfaceSize();
+ initFBOs(gl, textureSize);
+ helper.init(drawable, false);
+
+ gl.setSwapInterval(1);
+ }
+
+ @Override
+ public void dispose(final GLAutoDrawable drawable) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ helper.disposeAllGLEventListener(drawable, false);
+ for(int i=0; i<fbos.length; i++) {
+ fbos[i].destroy(gl);
+ fboTexs[i] = null;
+ }
+ if( ownsDevice ) {
+ deviceRenderer.dispose(gl);
+ }
+ }
+
+ @Override
+ public void display(final GLAutoDrawable drawable) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+
+ deviceRenderer.beginFrame(gl);
+
+ if(0 < numSamples) {
+ gl.glEnable(GL.GL_MULTISAMPLE);
+ }
+
+ final int fboCount = fbos.length;
+ final int displayRepeatFlags;
+ if( 1 == fboCount ) {
+ displayRepeatFlags = CustomGLEventListener.DISPLAY_DONTCLEAR;
+ } else {
+ displayRepeatFlags = 0;
+ }
+
+ // Update eye pos upfront to have same (almost) results
+ deviceRenderer.updateEyePose(0);
+ deviceRenderer.updateEyePose(1);
+
+ if( 1 == fboCount ) {
+ fbos[0].bind(gl);
+ }
+
+ for(int eyeNum=0; eyeNum<2; eyeNum++) {
+ if( 1 < fboCount ) {
+ fbos[eyeNum].bind(gl);
+ }
+
+ final StereoDeviceRenderer.Eye eye = deviceRenderer.getEye(eyeNum);
+ final RectangleImmutable viewport = eye.getViewport();
+ gl.glViewport(viewport.getX(), viewport.getY(), viewport.getWidth(), viewport.getHeight());
+
+ final int displayFlags = eyeNum > 0 ? CustomGLEventListener.DISPLAY_REPEAT | displayRepeatFlags : 0;
+ final GLEventListenerAction reshapeDisplayAction = new GLEventListenerAction() {
+ public void run(final GLAutoDrawable drawable, final GLEventListener listener) {
+ final StereoGLEventListener sl = (StereoGLEventListener) listener;
+ sl.reshapeForEye(drawable, viewport.getX(), viewport.getY(), viewport.getWidth(), viewport.getHeight(),
+ eye.getEyeParameter(), eye.getLastEyePose());
+ sl.display(drawable, displayFlags);
+ } };
+ helper.runForAllGLEventListener(drawable, reshapeDisplayAction);
+
+ if( 1 < fboCount ) {
+ fbos[eyeNum].unbind(gl);
+ }
+ }
+ if( 1 == fboCount ) {
+ fbos[0].unbind(gl);
+ }
+ // restore viewport
+ gl.glViewport(0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight());
+
+ if( usePostprocessing ) {
+ deviceRenderer.ppBegin(gl);
+ if( 1 == fboCount ) {
+ fbos[0].use(gl, fboTexs[0]);
+ deviceRenderer.ppBothEyes(gl);
+ fbos[0].unuse(gl);
+ } else {
+ fbos[0].use(gl, fboTexs[0]);
+ deviceRenderer.ppOneEye(gl, 0);
+ fbos[0].unuse(gl);
+ fbos[1].use(gl, fboTexs[1]);
+ deviceRenderer.ppOneEye(gl, 1);
+ fbos[1].unuse(gl);
+ }
+ deviceRenderer.ppEnd(gl);
+ }
+
+ if( !drawable.getAutoSwapBufferMode() ) {
+ drawable.swapBuffers();
+ }
+ deviceRenderer.endFrame(gl);
+ }
+
+ @Override
+ public void reshape(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height) {
+ if( !drawable.getAutoSwapBufferMode() ) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ gl.glViewport(0, 0, width, height);
+ }
+ }
+}
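For orientation, a minimal StereoGLEventListener which the StereoClientRenderer above could drive might look like the following. The class is hypothetical and only clears each eye's viewport; a real listener would rebuild its projection/modelview matrices from the passed EyeParameter and EyePose in reshapeForEye, as the demos touched by this commit do.

    import javax.media.opengl.GL;
    import javax.media.opengl.GLAutoDrawable;

    import com.jogamp.opengl.util.stereo.EyeParameter;
    import com.jogamp.opengl.util.stereo.EyePose;
    import com.jogamp.opengl.util.stereo.StereoGLEventListener;

    // Hypothetical listener: clears each eye's viewport; no actual scene content.
    public class ClearEyeListener implements StereoGLEventListener {
        @Override
        public void init(final GLAutoDrawable drawable) { }

        @Override
        public void dispose(final GLAutoDrawable drawable) { }

        @Override
        public void reshape(final GLAutoDrawable drawable, final int x, final int y,
                            final int width, final int height) { }

        @Override
        public void reshapeForEye(final GLAutoDrawable drawable, final int x, final int y,
                                  final int width, final int height,
                                  final EyeParameter eyeParam, final EyePose eyePose) {
            // The per-eye viewport is already set by the caller; a real listener would
            // recompute its projection and modelview matrices from eyeParam and eyePose here.
        }

        @Override
        public void display(final GLAutoDrawable drawable, final int flags) {
            final GL gl = drawable.getGL();
            if( 0 == ( flags & DISPLAY_DONTCLEAR ) ) { // flag inherited from CustomGLEventListener
                gl.glClearColor(0.1f, 0.1f, 0.2f, 1.0f);
                gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT);
            }
            // ... render the scene for the current eye ...
        }

        @Override
        public void display(final GLAutoDrawable drawable) {
            display(drawable, 0);
        }
    }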
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java
new file mode 100644
index 000000000..e5c0e3646
--- /dev/null
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java
@@ -0,0 +1,93 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.util.stereo;
+
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.PointImmutable;
+
+import jogamp.opengl.Debug;
+
+import com.jogamp.opengl.math.FovHVHalves;
+
+/**
+ * Interface describing a native stereoscopic device
+ */
+public interface StereoDevice {
+ public static final boolean DEBUG = Debug.debug("StereoDevice");
+
+ /**
+ * Default eye position offset for {@link #createRenderer(int, int, float[], FovHVHalves[], float)}.
+ * <p>
+ * Default offset is 1.6f <i>up</i> and 5.0f <i>away</i>.
+ * </p>
+ */
+ public static final float[] DEFAULT_EYE_POSITION_OFFSET = { 0.0f, 1.6f, -5.0f };
+
+ /** Disposes this {@link StereoDevice}. */
+ public void dispose();
+
+ /**
+ * If operating within a device-spanning virtual desktop,
+ * returns the device position.
+ * <p>
+ * Otherwise simply 0/0.
+ * </p>
+ */
+ public PointImmutable getPosition();
+
+ /**
+ * Returns the required surface size in pixel.
+ */
+ public DimensionImmutable getSurfaceSize();
+
+ /**
+ * Returns the device default {@link FovHVHalves} per eye.
+ */
+ public FovHVHalves[] getDefaultFOV();
+
+ /** Start or stop sensors. Returns true if action was successful, otherwise false. */
+ public boolean startSensors(boolean start);
+
+ /** Return true if sensors have been started, false otherwise */
+ public boolean getSensorsStarted();
+
+ /**
+ * Create a new {@link StereoDeviceRenderer} instance.
+ *
+ * @param distortionBits {@link StereoDeviceRenderer} distortion bits, e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, etc.
+ * @param textureCount desired texture count for post-processing, see {@link StereoDeviceRenderer#getTextureCount()} and {@link StereoDeviceRenderer#ppRequired()}
+ * @param eyePositionOffset eye position offset, e.g. {@link #DEFAULT_EYE_POSITION_OFFSET}.
+ * @param eyeFov FovHVHalves[2] field-of-view per eye
+ * @param pixelsPerDisplayPixel
+ * @param textureUnit
+ * @return
+ */
+ public StereoDeviceRenderer createRenderer(final int distortionBits,
+ final int textureCount, final float[] eyePositionOffset,
+ final FovHVHalves[] eyeFov, final float pixelsPerDisplayPixel, final int textureUnit);
+}
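A vendor implementation may honor fewer features than requested from createRenderer(..). The following sketch (class name hypothetical, the device assumed to come from a StereoDeviceFactory as described above) requests several distortion bits and then prints what the created renderer actually provides:

    import com.jogamp.opengl.util.stereo.StereoDevice;
    import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
    import com.jogamp.opengl.util.stereo.StereoUtil;

    public final class StereoCapsProbe {
        public static StereoDeviceRenderer probe(final StereoDevice device) {
            final int wantedBits = StereoDeviceRenderer.DISTORTION_BARREL
                                 | StereoDeviceRenderer.DISTORTION_CHROMATIC
                                 | StereoDeviceRenderer.DISTORTION_VIGNETTE;
            final StereoDeviceRenderer renderer = device.createRenderer(
                    wantedBits, 2 /* textureCount: one texture per eye */,
                    StereoDevice.DEFAULT_EYE_POSITION_OFFSET, device.getDefaultFOV(),
                    1.0f /* pixelsPerDisplayPixel */, 0 /* textureUnit */);
            // What is actually honored may differ from the request.
            System.err.println("distortion: " + StereoUtil.distortionBitsToString(renderer.getDistortionBits()));
            System.err.println("textures  : " + renderer.getTextureCount() + ", post-processing: " + renderer.ppRequired());
            System.err.println("surface   : " + renderer.getTotalSurfaceSize() + " total, " + renderer.getSingleSurfaceSize() + " per eye");
            return renderer;
        }
    }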
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java
new file mode 100644
index 000000000..d9054ce28
--- /dev/null
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java
@@ -0,0 +1,64 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.util.stereo;
+
+import com.jogamp.common.util.ReflectionUtil;
+
+/**
+ * Platform agnostic {@link StereoDevice} factory.
+ */
+public abstract class StereoDeviceFactory {
+ private static final String OVRStereoDeviceClazzName = "jogamp.opengl.oculusvr.OVRStereoDeviceFactory";
+ private static final Object[] ctorArgs;
+ private static final String isAvailableMethodName = "isAvailable";
+
+ static {
+ ctorArgs = new Object[6];
+ ctorArgs[0] = null;
+
+ }
+ public static StereoDeviceFactory createDefaultFactory() {
+ final ClassLoader cl = StereoDeviceFactory.class.getClassLoader();
+ final StereoDeviceFactory sink = createFactory(cl, OVRStereoDeviceClazzName);
+ if( null == sink ) {
+ // sink = create(cl, ANYOTHERCLAZZNAME);
+ }
+ return sink;
+ }
+
+ public static StereoDeviceFactory createFactory(final ClassLoader cl, final String implName) {
+ try {
+ if(((Boolean)ReflectionUtil.callStaticMethod(implName, isAvailableMethodName, null, null, cl)).booleanValue()) {
+ return (StereoDeviceFactory) ReflectionUtil.createInstance(implName, cl);
+ }
+ } catch (final Throwable t) { if(StereoDevice.DEBUG) { System.err.println("Caught "+t.getClass().getName()+": "+t.getMessage()); t.printStackTrace(); } }
+ return null;
+ }
+
+ public abstract StereoDevice createDevice(final int deviceIndex, final boolean verbose);
+}
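Besides createDefaultFactory(), a specific backend can be requested by name via createFactory(..); a minimal sketch (class and method name hypothetical) selecting the OculusVR factory class referenced above:

    import com.jogamp.opengl.util.stereo.StereoDevice;
    import com.jogamp.opengl.util.stereo.StereoDeviceFactory;

    public final class ExplicitBackendSelection {
        public static StereoDevice openOVRDevice() {
            // createFactory(..) returns null if the named backend is not available on the classpath.
            final StereoDeviceFactory factory = StereoDeviceFactory.createFactory(
                    StereoDeviceFactory.class.getClassLoader(),
                    "jogamp.opengl.oculusvr.OVRStereoDeviceFactory");
            return null != factory ? factory.createDevice(0 /* deviceIndex */, true /* verbose */) : null;
        }
    }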
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java
new file mode 100644
index 000000000..fd94f6bc3
--- /dev/null
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java
@@ -0,0 +1,235 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.util.stereo;
+
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.RectangleImmutable;
+import javax.media.opengl.GL;
+
+/**
+ * Stereoscopic device rendering interface.
+ * <p>
+ * The following pseudo-code describes how to implement a renderer
+ * using a {@link StereoDeviceRenderer}.
+ * See {@link StereoClientRenderer} which implements the following:
+ * <ul>
+ * <li>device.{@link #beginFrame(GL)}</li>
+ * <li>For both eyes:<ul>
+ * <li>device.{@link #updateEyePose(int)}</li>
+ * <li>if device.{@link #ppRequired()}: Set the render target, e.g. FBO</li>
+ * <li>Set the viewport using {@link Eye#getViewport()}</li>
+ * <li>{@link StereoGLEventListener#reshapeForEye(javax.media.opengl.GLAutoDrawable, int, int, int, int, EyeParameter, EyePose) upstream.reshapeEye(..)}</li>
+ * <li>{@link StereoGLEventListener#display(javax.media.opengl.GLAutoDrawable, int) upstream.display(..)}.</li>
+ * </ul></li>
+ * <li>Reset the viewport</li>
+ * <li>If device.{@link #ppRequired()}:<ul>
+ * <li>device.{@link #ppBegin(GL)}</li>
+ * <li>Use render target, e.g. FBO's texture</li>
+ * <li>device.{@link #ppBothEyes(GL)} or device.{@link #ppOneEye(GL, int)} for both eyes</li>
+ * <li>device.{@link #ppEnd(GL)}</li>
+ * </ul></li>
+ * <li>device.{@link #endFrame(GL)}</li>
+ * </ul>
+ */
+public interface StereoDeviceRenderer {
+ /**
+ * Distortion Bit: Barrel distortion compensating lens pincushion distortion
+ */
+ public static final int DISTORTION_BARREL = 1 << 0;
+
+ /**
+ * Distortion Bit: Chromatic distortion compensating lens chromatic aberration.
+ */
+ public static final int DISTORTION_CHROMATIC = 1 << 1;
+
+ /**
+ * Distortion Bit: Vignette distortion compensating lens vignetting.
+ */
+ public static final int DISTORTION_VIGNETTE = 1 << 2;
+
+ /**
+ * Distortion Bit: Timewarp distortion technique to predict
+ * {@link EyePose} movement to reduce latency.
+ * <p>
+ * FIXME: Explanation needs refinement!
+ * </p>
+ */
+ public static final int DISTORTION_TIMEWARP = 1 << 3;
+
+
+ /** Returns the {@link StereoDevice} of this {@link StereoDeviceRenderer} instance. */
+ public StereoDevice getDevice();
+
+ /**
+ * Interface describing one eye of the stereoscopic device,
+ * see {@link StereoDeviceRenderer#getEye(int)}.
+ */
+ public static interface Eye {
+ /**
+ * Returns the viewport for this eye.
+ */
+ public RectangleImmutable getViewport();
+ /**
+ * Returns the {@link EyeParameter} of this eye.
+ */
+ public EyeParameter getEyeParameter();
+ /**
+ * Returns the last {@link EyePose} of this eye.
+ */
+ public EyePose getLastEyePose();
+ }
+
+ /**
+ * Returns the {@link Eye} instance for the denoted <code>eyeNum</code>.
+ */
+ public Eye getEye(final int eyeNum);
+
+ /**
+ * Updates the {@link Eye#getLastEyePose()}
+ * for the denoted <code>eyeNum</code>.
+ */
+ public EyePose updateEyePose(final int eyeNum);
+
+ /**
+ * Returns distortion compensation bits, e.g. {@link #DISTORTION_BARREL},
+ * in case the stereoscopic display requires such, i.e. in case lenses are utilized.
+ * <p>
+ * Distortion requires {@link #ppRequired() post-processing}.
+ * </p>
+ */
+ public int getDistortionBits();
+
+ /**
+ * Method returns <code>true</code> if using <i>side-by-side</i> (SBS)
+ * stereoscopic images, otherwise <code>false</code>.
+ * <p>
+ * SBS requires that both eye's images are presented
+ * <i>side-by-side</i> in the final framebuffer.
+ * </p>
+ * <p>
+ * Either the renderer presents the images <i>side-by-side</i> according to the {@link Eye#getViewport() eye's viewport},
+ * or {@link #ppRequired() post-processing} is utilized to merge {@link #getTextureCount() textures}
+ * to a <i>side-by-side</i> configuration.
+ * </p>
+ */
+ public boolean usesSideBySideStereo();
+
+ /**
+ * Returns the unified surface size of a single eye's image in pixel units.
+ */
+ public DimensionImmutable getSingleSurfaceSize();
+
+ /**
+ * Returns the total surface size required for the complete images in pixel units.
+ * <p>
+ * If {@link #usesSideBySideStereo()} the total size spans over both {@link #getSingleSurfaceSize()}, side-by-side.
+ * </p>
+ * <p>
+ * Otherwise the size is equal to {@link #getSingleSurfaceSize()}.
+ * </p>
+ */
+ public DimensionImmutable getTotalSurfaceSize();
+
+ /**
+ * Returns the used texture-image count for post-processing, see {@link #ppRequired()}.
+ * <p>
+ * In case the renderer does not support multiple textures for post-processing,
+ * or no post-processing at all, method returns zero despite the request
+ * from {@link StereoDevice#createRenderer(int, int, float[], com.jogamp.opengl.math.FovHVHalves[], float)}.
+ * </p>
+ */
+ public int getTextureCount();
+
+ /** Returns the desired texture-image unit for post-processing, see {@link #ppRequired()}. */
+ public int getTextureUnit();
+
+ /** Initialize OpenGL related resources */
+ public void init(final GL gl);
+
+ /** Release all OpenGL related resources */
+ public void dispose(final GL gl);
+
+ /** Notifying that a new frame is about to start. */
+ public void beginFrame(final GL gl);
+
+ /** Notifying that the frame has been rendered completely. */
+ public void endFrame(final GL gl);
+
+ /**
+ * Returns <code>true</code> if stereoscopic post-processing is required,
+ * otherwise <code>false</code>.
+ * <p>
+ * Stereoscopic post-processing is usually required if:
+ * <ul>
+ * <li>one of the <i>distortion</i> modes is set, e.g. {@link #DISTORTION_BARREL}</li>
+ * <li>texture-images are being used, see {@link #getTextureCount()}</li>
+ * </ul>
+ * </p>
+ * <p>
+ * If stereoscopic post-processing is used
+ * the following post-processing methods must be called before {@link #endFrame(GL)}:
+ * <ul>
+ * <li>{@link #ppBegin(GL)}</li>
+ * <li>{@link #ppBothEyes(GL)} or {@link #ppOneEye(GL, int)} for both eyes</li>
+ * <li>{@link #ppEnd(GL)}</li>
+ * </ul>
+ * </p>
+ */
+ public boolean ppRequired();
+
+ /**
+ * Begin stereoscopic post-processing, see {@link #ppRequired()}.
+ * <p>
+ * {@link #updateEyePose(int)} for both eyes must be called upfront
+ * when rendering upstream {@link StereoGLEventListener}.
+ * </p>
+ *
+ * @param gl
+ */
+ public void ppBegin(final GL gl);
+
+ /**
+ * Performs stereoscopic post-processing for both eyes, see {@link #ppRequired()}.
+ * @param gl
+ */
+ public void ppBothEyes(final GL gl);
+
+ /**
+ * Performs stereoscopic post-processing for one eye, see {@link #ppRequired()}.
+ * @param gl
+ * @param eyeNum
+ */
+ public void ppOneEye(final GL gl, final int eyeNum);
+
+ /**
+ * End stereoscopic post-processing, see {@link #ppRequired()}.
+ * @param gl
+ */
+ public void ppEnd(final GL gl);
+
+}
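Condensing the pseudo-code from the javadoc above into Java, a sketch of one frame driven directly against a StereoDeviceRenderer could look as follows (class name hypothetical; the render-target/FBO handling is elided, the StereoClientRenderer earlier in this commit is the complete implementation):

    import javax.media.nativewindow.util.RectangleImmutable;
    import javax.media.opengl.GL;
    import javax.media.opengl.GLAutoDrawable;

    import com.jogamp.opengl.util.CustomGLEventListener;
    import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
    import com.jogamp.opengl.util.stereo.StereoGLEventListener;

    public final class StereoFrameLoop {
        /** Renders one frame following the StereoDeviceRenderer protocol; FBO binding elided. */
        public static void renderFrame(final GLAutoDrawable drawable, final GL gl,
                                       final StereoDeviceRenderer device, final StereoGLEventListener upstream) {
            device.beginFrame(gl);
            for(int eyeNum=0; eyeNum<2; eyeNum++) {
                device.updateEyePose(eyeNum);
                final StereoDeviceRenderer.Eye eye = device.getEye(eyeNum);
                final RectangleImmutable vp = eye.getViewport();
                // if device.ppRequired(): bind this eye's render target (FBO) here
                gl.glViewport(vp.getX(), vp.getY(), vp.getWidth(), vp.getHeight());
                upstream.reshapeForEye(drawable, vp.getX(), vp.getY(), vp.getWidth(), vp.getHeight(),
                                       eye.getEyeParameter(), eye.getLastEyePose());
                upstream.display(drawable, eyeNum > 0 ? CustomGLEventListener.DISPLAY_REPEAT : 0);
            }
            gl.glViewport(0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight()); // restore viewport
            if( device.ppRequired() ) {
                device.ppBegin(gl);
                // use the render target's texture(s) here, then:
                device.ppBothEyes(gl); // or ppOneEye(gl, 0) and ppOneEye(gl, 1), one per texture
                device.ppEnd(gl);
            }
            device.endFrame(gl);
        }
    }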
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoRendererListener.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoGLEventListener.java
index 5e6e40a08..ec580cbf9 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoRendererListener.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoGLEventListener.java
@@ -31,15 +31,16 @@ import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLEventListener;
import com.jogamp.opengl.math.FloatUtil;
-import com.jogamp.opengl.util.CustomRendererListener;
+import com.jogamp.opengl.util.CustomGLEventListener;
/**
- * Extended {@link GLEventListener} and {@link CustomRendererListener} interface
+ * Extended {@link GLEventListener} and {@link CustomGLEventListener} interface
* supporting stereoscopic client rendering.
*/
-public interface StereoRendererListener extends CustomRendererListener {
+public interface StereoGLEventListener extends CustomGLEventListener {
/**
- * Stereo capable specialization of {@link #reshape(GLAutoDrawable, int, int, int, int)}.
+ * Stereo capable specialization of {@link #reshape(GLAutoDrawable, int, int, int, int)}
+ * for one {@link StereoDeviceRenderer.Eye}.
* <p>
* Called by the stereo renderer before each {@link #display(GLAutoDrawable)}
* or {@link #display(GLAutoDrawable, int)} call.
@@ -66,8 +67,8 @@ public interface StereoRendererListener extends CustomRendererListener {
* @param eyePose current eye position and orientation
* @see FloatUtil#makePerspective(float[], int, boolean, com.jogamp.opengl.math.FloatUtil.FovHVHalves, float, float)
*/
- public void reshapeEye(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height,
- final EyeParameter eyeParam, final EyePose eyePose);
+ public void reshapeForEye(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height,
+ final EyeParameter eyeParam, final EyePose eyePose);
}
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java
new file mode 100644
index 000000000..280d99233
--- /dev/null
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java
@@ -0,0 +1,125 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.util.stereo;
+
+import com.jogamp.opengl.math.FloatUtil;
+import com.jogamp.opengl.math.Quaternion;
+import com.jogamp.opengl.math.VectorUtil;
+import com.jogamp.opengl.util.CustomGLEventListener;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer.Eye;
+
+public class StereoUtil {
+
+ /** See {@link StereoDeviceRenderer#getDistortionBits()}. */
+ public static boolean usesBarrelDistortion(final int distortionBits) { return 0 != ( distortionBits & StereoDeviceRenderer.DISTORTION_BARREL ) ; }
+ /** See {@link StereoDeviceRenderer#getDistortionBits()}. */
+ public static boolean usesTimewarpDistortion(final int distortionBits) { return 0 != ( distortionBits & StereoDeviceRenderer.DISTORTION_TIMEWARP ) ; }
+ /** See {@link StereoDeviceRenderer#getDistortionBits()}. */
+ public static boolean usesChromaticDistortion(final int distortionBits) { return 0 != ( distortionBits & StereoDeviceRenderer.DISTORTION_CHROMATIC ) ; }
+ /** See {@link StereoDeviceRenderer#getDistortionBits()}. */
+ public static boolean usesVignetteDistortion(final int distortionBits) { return 0 != ( distortionBits & StereoDeviceRenderer.DISTORTION_VIGNETTE ) ; }
+
+ /** See {@link StereoDeviceRenderer#getDistortionBits()}. */
+ public static String distortionBitsToString(final int distortionBits) {
+ boolean appendComma = false;
+ final StringBuilder sb = new StringBuilder();
+ if( usesBarrelDistortion(distortionBits) ) {
+ if( appendComma ) { sb.append(", "); };
+ sb.append("barrel"); appendComma=true;
+ }
+ if( usesVignetteDistortion(distortionBits) ) {
+ if( appendComma ) { sb.append(", "); };
+ sb.append("vignette"); appendComma=true;
+ }
+ if( usesChromaticDistortion(distortionBits) ) {
+ if( appendComma ) { sb.append(", "); };
+ sb.append("chroma"); appendComma=true;
+ }
+ if( usesTimewarpDistortion(distortionBits) ) {
+ if( appendComma ) { sb.append(", "); };
+ sb.append("timewarp"); appendComma=true;
+ }
+ return sb.toString();
+ }
+
+ /**
+ * Calculates the <i>Side By Side</i>, SBS, projection- and modelview matrix for one eye.
+ * <p>
+ * {@link #updateEyePose(int)} must be called upfront.
+ * </p>
+ * <p>
+ * This method merely exists as an example implementation to compute the matrices,
+ * which shall be adopted by the
+ * {@link CustomGLEventListener#reshape(javax.media.opengl.GLAutoDrawable, int, int, int, int, EyeParameter, EyePose) upstream client code}.
+ * </p>
+ * @param eye eye to compute the matrices for
+ * @param zNear frustum near value
+ * @param zFar frustum far value
+ * @param mat4Projection float[16] projection matrix result
+ * @param mat4Modelview float[16] modelview matrix result
+ */
+ public static void getSBSUpstreamPMV(final Eye eye, final float zNear, final float zFar,
+ final float[] mat4Projection, final float[] mat4Modelview) {
+ final float[] mat4Tmp1 = new float[16];
+ final float[] mat4Tmp2 = new float[16];
+ final float[] vec3Tmp1 = new float[3];
+ final float[] vec3Tmp2 = new float[3];
+ final float[] vec3Tmp3 = new float[3];
+
+ final EyeParameter eyeParam = eye.getEyeParameter();
+ final EyePose eyePose = eye.getLastEyePose();
+
+ //
+ // Projection
+ //
+ FloatUtil.makePerspective(mat4Projection, 0, true, eyeParam.fovhv, zNear, zFar);
+
+ //
+ // Modelview
+ //
+ final Quaternion rollPitchYaw = new Quaternion();
+ // private final float eyeYaw = FloatUtil.PI; // 180 degrees in radians
+ // rollPitchYaw.rotateByAngleY(eyeYaw);
+ final float[] shiftedEyePos = rollPitchYaw.rotateVector(vec3Tmp1, 0, eyePose.position, 0);
+ VectorUtil.addVec3(shiftedEyePos, shiftedEyePos, eyeParam.positionOffset);
+
+ rollPitchYaw.mult(eyePose.orientation);
+ final float[] up = rollPitchYaw.rotateVector(vec3Tmp2, 0, VectorUtil.VEC3_UNIT_Y, 0);
+ final float[] forward = rollPitchYaw.rotateVector(vec3Tmp3, 0, VectorUtil.VEC3_UNIT_Z_NEG, 0);
+ final float[] center = VectorUtil.addVec3(forward, shiftedEyePos, forward);
+
+ final float[] mLookAt = FloatUtil.makeLookAt(mat4Tmp2, 0, shiftedEyePos, 0, center, 0, up, 0, mat4Tmp1);
+ final float[] mViewAdjust = FloatUtil.makeTranslation(mat4Modelview, true,
+ eyeParam.distNoseToPupilX,
+ eyeParam.distMiddleToPupilY,
+ eyeParam.eyeReliefZ);
+
+ /* mat4Modelview = */ FloatUtil.multMatrix(mViewAdjust, mLookAt);
+ }
+
+}
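getSBSUpstreamPMV above is documented as an example computation meant to be adopted by upstream client code; a small hypothetical helper applying it to both eyes, assuming updateEyePose(..) has already been called for the current frame:

    import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
    import com.jogamp.opengl.util.stereo.StereoDeviceRenderer.Eye;
    import com.jogamp.opengl.util.stereo.StereoUtil;

    public final class EyePMVHelper {
        public final float[][] mat4Projection = { new float[16], new float[16] };
        public final float[][] mat4Modelview  = { new float[16], new float[16] };

        public void update(final StereoDeviceRenderer renderer, final float zNear, final float zFar) {
            for(int eyeNum=0; eyeNum<2; eyeNum++) {
                final Eye eye = renderer.getEye(eyeNum);
                StereoUtil.getSBSUpstreamPMV(eye, zNear, zFar, mat4Projection[eyeNum], mat4Modelview[eyeNum]);
            }
            // Column-major float[16] results, ready to be uploaded as shader uniforms or
            // loaded into a PMVMatrix by the upstream StereoGLEventListener.
        }
    }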
diff --git a/src/jogl/classes/jogamp/opengl/GLDrawableHelper.java b/src/jogl/classes/jogamp/opengl/GLDrawableHelper.java
index aea9a5b7b..e3ce7001e 100644
--- a/src/jogl/classes/jogamp/opengl/GLDrawableHelper.java
+++ b/src/jogl/classes/jogamp/opengl/GLDrawableHelper.java
@@ -649,8 +649,10 @@ public class GLDrawableHelper {
public final void display(final GLAutoDrawable drawable) {
displayImpl(drawable);
+ // runForAllGLEventListener(drawable, displayAction);
if( glRunnables.size()>0 && !execGLRunnables(drawable) ) { // glRunnables volatile OK; execGL.. only executed if size > 0
displayImpl(drawable);
+ // runForAllGLEventListener(drawable, displayAction);
}
}
private final void displayImpl(final GLAutoDrawable drawable) {
@@ -669,6 +671,31 @@ public class GLDrawableHelper {
}
}
+ public static interface GLEventListenerAction {
+ public void run(final GLAutoDrawable drawable, final GLEventListener listener);
+ }
+ /**
+ private static GLEventListenerAction displayAction = new GLEventListenerAction() {
+ public void run(final GLAutoDrawable drawable, final GLEventListener listener) {
+ listener.display(drawable);
+ } }; */
+
+ public final void runForAllGLEventListener(final GLAutoDrawable drawable, final GLEventListenerAction action) {
+ synchronized(listenersLock) {
+ final ArrayList<GLEventListener> _listeners = listeners;
+ final int listenerCount = _listeners.size();
+ for (int i=0; i < listenerCount; i++) {
+ final GLEventListener listener = _listeners.get(i) ;
+ // GLEventListener may need to be init,
+ // in case this one is added after the realization of the GLAutoDrawable
+ if( listenersToBeInit.remove(listener) ) {
+ init( listener, drawable, true /* sendReshape */, listenersToBeInit.size() + 1 == listenerCount /* setViewport if 1st init */ );
+ }
+ action.run(drawable, listener);
+ }
+ }
+ }
+
private final void reshape(final GLEventListener listener, final GLAutoDrawable drawable,
final int x, final int y, final int width, final int height, final boolean setViewport, final boolean checkInit) {
if(checkInit) {
diff --git a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
index 9cfa94a60..f36681e3b 100644
--- a/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
+++ b/src/jogl/classes/jogamp/opengl/util/av/GLMediaPlayerImpl.java
@@ -36,7 +36,6 @@ import java.util.Map;
import javax.media.nativewindow.AbstractGraphicsDevice;
import javax.media.opengl.GL;
-import javax.media.opengl.GL2;
import javax.media.opengl.GL2GL3;
import javax.media.opengl.GLContext;
import javax.media.opengl.GLDrawable;
diff --git a/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererDualFBO.java b/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererDualFBO.java
deleted file mode 100644
index 9af3397cf..000000000
--- a/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererDualFBO.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/**
- * Copyright 2014 JogAmp Community. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
- * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
- * The views and conclusions contained in the software and documentation are those of the
- * authors and should not be interpreted as representing official policies, either expressed
- * or implied, of JogAmp Community.
- */
-package com.jogamp.opengl.oculusvr;
-
-import javax.media.opengl.GL;
-import javax.media.opengl.GL2ES2;
-import javax.media.opengl.GLAutoDrawable;
-import javax.media.opengl.GLEventListener;
-
-import jogamp.opengl.oculusvr.OVRDistortion;
-
-import com.jogamp.oculusvr.OVR;
-import com.jogamp.oculusvr.ovrFrameTiming;
-import com.jogamp.opengl.FBObject;
-import com.jogamp.opengl.FBObject.Attachment;
-import com.jogamp.opengl.FBObject.TextureAttachment;
-import com.jogamp.opengl.FBObject.Attachment.Type;
-import com.jogamp.opengl.util.CustomRendererListener;
-import com.jogamp.opengl.util.stereo.StereoRendererListener;
-
-/**
- * OculusVR (OVR) <i>Side By Side</i> Distortion Renderer utilizing {@link OVRDistortion}
- * implementing {@link GLEventListener} for convenience.
- * <p>
- * Implementation renders an {@link StereoRendererListener} instance
- * side-by-side using two {@link FBObject}s according to {@link OVRDistortion}.
- * </p>
- */
-public class OVRSBSRendererDualFBO implements GLEventListener {
- private final OVRDistortion dist;
- private final boolean ownsDist;
- private final StereoRendererListener upstream;
- private final FBObject[] fbos;
- private final int magFilter;
- private final int minFilter;
-
- private int numSamples;
- private final TextureAttachment[] fboTexs;
-
- public OVRSBSRendererDualFBO(final OVRDistortion dist, final boolean ownsDist, final StereoRendererListener upstream,
- final int magFilter, final int minFilter, final int numSamples) {
- this.dist = dist;
- this.ownsDist = ownsDist;
- this.upstream = upstream;
- this.fbos = new FBObject[2];
- this.fbos[0] = new FBObject();
- this.fbos[1] = new FBObject();
- this.magFilter = magFilter;
- this.minFilter = minFilter;
-
- this.numSamples = numSamples;
- this.fboTexs = new TextureAttachment[2];
- }
-
- private void initFBOs(final GL gl, final int width, final int height) {
- // remove all texture attachments, since MSAA uses just color-render-buffer
- // and non-MSAA uses texture2d-buffer
- fbos[0].detachAllColorbuffer(gl);
- fbos[1].detachAllColorbuffer(gl);
-
- fbos[0].reset(gl, width, height, numSamples, false);
- fbos[1].reset(gl, width, height, numSamples, false);
- if(fbos[0].getNumSamples() != fbos[1].getNumSamples()) {
- throw new InternalError("sample size mismatch: \n\t0: "+fbos[0]+"\n\t1: "+fbos[1]);
- }
- numSamples = fbos[0].getNumSamples();
-
- if(numSamples>0) {
- fbos[0].attachColorbuffer(gl, 0, true); // MSAA requires alpha
- fbos[0].attachRenderbuffer(gl, Type.DEPTH, 24);
- final FBObject ssink0 = new FBObject();
- {
- ssink0.reset(gl, width, height);
- ssink0.attachTexture2D(gl, 0, false, magFilter, minFilter, GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE);
- ssink0.attachRenderbuffer(gl, Attachment.Type.DEPTH, 24);
- }
- fbos[0].setSamplingSink(ssink0);
- fbos[0].resetSamplingSink(gl); // validate
- fboTexs[0] = fbos[0].getSamplingSink();
-
- fbos[1].attachColorbuffer(gl, 0, true); // MSAA requires alpha
- fbos[1].attachRenderbuffer(gl, Type.DEPTH, 24);
- final FBObject ssink1 = new FBObject();
- {
- ssink1.reset(gl, width, height);
- ssink1.attachTexture2D(gl, 0, false, magFilter, minFilter, GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE);
- ssink1.attachRenderbuffer(gl, Attachment.Type.DEPTH, 24);
- }
- fbos[1].setSamplingSink(ssink1);
- fbos[1].resetSamplingSink(gl); // validate
- fboTexs[1] = fbos[1].getSamplingSink();
- } else {
- fboTexs[0] = fbos[0].attachTexture2D(gl, 0, false, magFilter, minFilter, GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE);
- fbos[0].attachRenderbuffer(gl, Type.DEPTH, 24);
- fboTexs[1] = fbos[1].attachTexture2D(gl, 0, false, magFilter, minFilter, GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE);
- fbos[1].attachRenderbuffer(gl, Type.DEPTH, 24);
- }
- fbos[0].unbind(gl);
- fbos[1].unbind(gl);
- }
-
- @SuppressWarnings("unused")
- private void resetFBOs(final GL gl, final int width, final int height) {
- fbos[0].reset(gl, width, height, numSamples, true);
- fbos[1].reset(gl, width, height, numSamples, true);
- if(fbos[0].getNumSamples() != fbos[1].getNumSamples()) {
- throw new InternalError("sample size mismatch: \n\t0: "+fbos[0]+"\n\t1: "+fbos[1]);
- }
- numSamples = fbos[0].getNumSamples();
- if(numSamples>0) {
- fboTexs[0] = fbos[0].getSamplingSink();
- fboTexs[1] = fbos[1].getSamplingSink();
- } else {
- fboTexs[0] = (TextureAttachment) fbos[0].getColorbuffer(0);
- fboTexs[1] = (TextureAttachment) fbos[1].getColorbuffer(0);
- }
- }
-
- @Override
- public void init(final GLAutoDrawable drawable) {
- final GL2ES2 gl = drawable.getGL().getGL2ES2();
- dist.init(gl);
-
- // We will do some offscreen rendering, setup FBO...
- if( null != upstream ) {
- final int[] textureSize = dist.textureSize;
- initFBOs(gl, textureSize[0], textureSize[1]);
- upstream.init(drawable);
- }
-
- gl.setSwapInterval(1);
- }
-
- @Override
- public void dispose(final GLAutoDrawable drawable) {
- final GL2ES2 gl = drawable.getGL().getGL2ES2();
- // FIXME complete release
- if( null != upstream ) {
- upstream.dispose(drawable);
- fbos[0].destroy(gl);
- fbos[1].destroy(gl);
- }
- if( ownsDist ) {
- dist.dispose(gl);
- }
- }
-
- @Override
- public void display(final GLAutoDrawable drawable) {
- final ovrFrameTiming frameTiming = OVR.ovrHmd_BeginFrameTiming(dist.hmdCtx, 0);
-
- final GL2ES2 gl = drawable.getGL().getGL2ES2();
- if(0 < numSamples) {
- gl.glEnable(GL.GL_MULTISAMPLE);
- }
-
- // FIXME: Instead of setting the viewport,
- // it's better to change the projection matrix!
- if( null != upstream ) {
- for(int eyeNum=0; eyeNum<2; eyeNum++) {
- // final ovrPosef eyeRenderPose = OVR.ovrHmd_GetEyePose(hmdCtx, eyeNum);
- // final float[] eyePos = OVRUtil.getVec3f(eyeRenderPose.getPosition());
- fbos[eyeNum].bind(gl);
-
- final OVRDistortion.EyeData eyeDist = dist.eyes[eyeNum];
- final int[] viewport = eyeDist.viewport;
- gl.glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
-
- dist.updateEyePose(eyeNum);
- upstream.reshapeEye(drawable, viewport[0], viewport[1], viewport[2], viewport[3],
- dist.getEyeParam(eyeNum), dist.updateEyePose(eyeNum));
- upstream.display(drawable, eyeNum > 0 ? CustomRendererListener.DISPLAY_REPEAT : 0);
- fbos[eyeNum].unbind(gl);
- }
- gl.glViewport(0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight());
- }
-
- gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
- gl.glClear(GL.GL_COLOR_BUFFER_BIT);
- gl.glActiveTexture(GL.GL_TEXTURE0 + dist.texUnit0.intValue());
-
- if( null != upstream ) {
- dist.displayOneEyePre(gl, frameTiming.getTimewarpPointSeconds());
- fbos[0].use(gl, fboTexs[0]);
- dist.displayOneEye(gl, 0);
- fbos[0].unuse(gl);
- fbos[1].use(gl, fboTexs[1]);
- dist.displayOneEye(gl, 1);
- fbos[1].unuse(gl);
- dist.displayOneEyePost(gl);
- } else {
- dist.display(gl, frameTiming.getTimewarpPointSeconds());
- }
-
- if( !drawable.getAutoSwapBufferMode() ) {
- drawable.swapBuffers();
- }
- OVR.ovrHmd_EndFrameTiming(dist.hmdCtx);
- }
-
- @Override
- public void reshape(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height) {
- if( !drawable.getAutoSwapBufferMode() ) {
- final GL2ES2 gl = drawable.getGL().getGL2ES2();
- gl.glViewport(0, 0, width, height);
- }
- }
-}
diff --git a/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererSingleFBO.java b/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererSingleFBO.java
deleted file mode 100644
index b18d1634e..000000000
--- a/src/oculusvr/classes/com/jogamp/opengl/oculusvr/OVRSBSRendererSingleFBO.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/**
- * Copyright 2014 JogAmp Community. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
- * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
- * The views and conclusions contained in the software and documentation are those of the
- * authors and should not be interpreted as representing official policies, either expressed
- * or implied, of JogAmp Community.
- */
-package com.jogamp.opengl.oculusvr;
-
-import javax.media.opengl.GL;
-import javax.media.opengl.GL2ES2;
-import javax.media.opengl.GLAutoDrawable;
-import javax.media.opengl.GLEventListener;
-
-import jogamp.opengl.oculusvr.OVRDistortion;
-
-import com.jogamp.oculusvr.OVR;
-import com.jogamp.oculusvr.ovrFrameTiming;
-import com.jogamp.opengl.FBObject;
-import com.jogamp.opengl.FBObject.Attachment;
-import com.jogamp.opengl.FBObject.TextureAttachment;
-import com.jogamp.opengl.FBObject.Attachment.Type;
-import com.jogamp.opengl.util.CustomRendererListener;
-import com.jogamp.opengl.util.stereo.StereoRendererListener;
-
-/**
- * OculusVR (OVR) <i>Side By Side</i> Distortion Renderer utilizing {@link OVRDistortion}
- * implementing {@link GLEventListener} for convenience.
- * <p>
- * Implementation renders an {@link StereoRendererListener} instance
- * side-by-side within one {@link FBObject} according to {@link OVRDistortion}.
- * </p>
- */
-public class OVRSBSRendererSingleFBO implements GLEventListener {
-
- private final OVRDistortion dist;
- private final boolean ownsDist;
- private final StereoRendererListener upstream;
- private final FBObject fbo;
- private final int magFilter;
- private final int minFilter;
-
- private int numSamples;
- private TextureAttachment fboTex;
-
- /**
- * @param dist {@link OVRDistortion} instance used for rendering.
- * @param ownsDist if true, {@link OVRDistortion#dispose(GL2ES2)} is issued on this instance's {@link #dispose(GLAutoDrawable)} method, otherwise not.
- * @param upstream the upstream {@link StereoRendererListener}, a.k.a the <i>content</i> to render for both eyes
- * @param magFilter if > 0 value for {@link GL#GL_TEXTURE_MAG_FILTER}
- * @param minFilter if > 0 value for {@link GL#GL_TEXTURE_MIN_FILTER}
- * @param numSamples sample-count, if > 0 using multisampling w/ given samples, otherwise no multisampling applies
- */
- public OVRSBSRendererSingleFBO(final OVRDistortion dist, final boolean ownsDist, final StereoRendererListener upstream,
- final int magFilter, final int minFilter, final int numSamples) {
- this.dist = dist;
- this.ownsDist = ownsDist;
- this.upstream = upstream;
- this.fbo = new FBObject();
- this.magFilter = magFilter;
- this.minFilter = minFilter;
-
- this.numSamples = numSamples;
- }
-
- private void initFBOs(final GL gl, final int width, final int height) {
- // remove all texture attachments, since MSAA uses just color-render-buffer
- // and non-MSAA uses texture2d-buffer
- fbo.detachAllColorbuffer(gl);
-
- fbo.reset(gl, width, height, numSamples, false);
- numSamples = fbo.getNumSamples();
-
- if(numSamples>0) {
- fbo.attachColorbuffer(gl, 0, true); // MSAA requires alpha
- fbo.attachRenderbuffer(gl, Type.DEPTH, 24);
- final FBObject ssink = new FBObject();
- {
- ssink.reset(gl, width, height);
- ssink.attachTexture2D(gl, 0, false, magFilter, minFilter, GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE);
- ssink.attachRenderbuffer(gl, Attachment.Type.DEPTH, 24);
- }
- fbo.setSamplingSink(ssink);
- fbo.resetSamplingSink(gl); // validate
- fboTex = fbo.getSamplingSink();
- } else {
- fboTex = fbo.attachTexture2D(gl, 0, false, magFilter, minFilter, GL.GL_CLAMP_TO_EDGE, GL.GL_CLAMP_TO_EDGE);
- fbo.attachRenderbuffer(gl, Type.DEPTH, 24);
- }
- fbo.unbind(gl);
- }
-
- @SuppressWarnings("unused")
- private void resetFBOs(final GL gl, final int width, final int height) {
- fbo.reset(gl, width, height, numSamples, true);
- numSamples = fbo.getNumSamples();
- if(numSamples>0) {
- fboTex = fbo.getSamplingSink();
- } else {
- fboTex = (TextureAttachment) fbo.getColorbuffer(0);
- }
- }
-
- @Override
- public void init(final GLAutoDrawable drawable) {
- final GL2ES2 gl = drawable.getGL().getGL2ES2();
- dist.init(gl);
-
- // We will do some offscreen rendering, setup FBO...
- if( null != upstream ) {
- final int[] textureSize = dist.textureSize;
- initFBOs(gl, textureSize[0], textureSize[1]);
- upstream.init(drawable);
- }
-
- gl.setSwapInterval(1);
- }
-
- @Override
- public void dispose(final GLAutoDrawable drawable) {
- final GL2ES2 gl = drawable.getGL().getGL2ES2();
- // FIXME complete release
- if( null != upstream ) {
- upstream.dispose(drawable);
- fbo.destroy(gl);
- }
- if( ownsDist ) {
- dist.dispose(gl);
- }
- }
-
- @Override
- public void display(final GLAutoDrawable drawable) {
- final ovrFrameTiming frameTiming = OVR.ovrHmd_BeginFrameTiming(dist.hmdCtx, 0);
-
- final GL2ES2 gl = drawable.getGL().getGL2ES2();
- if(0 < numSamples) {
- gl.glEnable(GL.GL_MULTISAMPLE);
- }
-
- // FIXME: Instead of setting the viewport,
- // it's better to change the projection matrix!
- if( null != upstream ) {
- fbo.bind(gl);
-
- for(int eyeNum=0; eyeNum<2; eyeNum++) {
- // final ovrPosef eyeRenderPose = OVR.ovrHmd_GetEyePose(hmdCtx, eyeNum);
- // final float[] eyePos = OVRUtil.getVec3f(eyeRenderPose.getPosition());
- final OVRDistortion.EyeData eyeDist = dist.eyes[eyeNum];
- final int[] viewport = eyeDist.viewport;
- gl.glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
-
- upstream.reshapeEye(drawable, viewport[0], viewport[1], viewport[2], viewport[3],
- dist.getEyeParam(eyeNum), dist.updateEyePose(eyeNum));
- upstream.display(drawable, eyeNum > 0 ? CustomRendererListener.DISPLAY_REPEAT | CustomRendererListener.DISPLAY_DONTCLEAR : 0);
- }
- fbo.unbind(gl);
- gl.glViewport(0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight());
- }
-
- gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
- gl.glClear(GL.GL_COLOR_BUFFER_BIT);
- gl.glActiveTexture(GL.GL_TEXTURE0 + dist.texUnit0.intValue());
-
- if( null != upstream ) {
- fbo.use(gl, fboTex);
- dist.display(gl, frameTiming.getTimewarpPointSeconds());
- fbo.unuse(gl);
- } else {
- dist.display(gl, frameTiming.getTimewarpPointSeconds());
- }
-
- if( !drawable.getAutoSwapBufferMode() ) {
- drawable.swapBuffers();
- }
- OVR.ovrHmd_EndFrameTiming(dist.hmdCtx);
- }
-
- @Override
- public void reshape(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height) {
- if( !drawable.getAutoSwapBufferMode() ) {
- final GL2ES2 gl = drawable.getGL().getGL2ES2();
- gl.glViewport(0, 0, width, height);
- }
- }
-}
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRDistortion.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRDistortion.java
deleted file mode 100644
index c32270ac4..000000000
--- a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRDistortion.java
+++ /dev/null
@@ -1,690 +0,0 @@
-/**
- * Copyright 2014 JogAmp Community. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
- * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
- * The views and conclusions contained in the software and documentation are those of the
- * authors and should not be interpreted as representing official policies, either expressed
- * or implied, of JogAmp Community.
- */
-package jogamp.opengl.oculusvr;
-
-import java.nio.FloatBuffer;
-import java.nio.ShortBuffer;
-
-import javax.media.opengl.GL;
-import javax.media.opengl.GL2ES2;
-import javax.media.opengl.GLArrayData;
-import javax.media.opengl.GLEventListener;
-import javax.media.opengl.GLException;
-import javax.media.opengl.GLUniformData;
-
-import jogamp.common.os.PlatformPropsImpl;
-
-import com.jogamp.common.nio.Buffers;
-import com.jogamp.common.os.Platform;
-import com.jogamp.oculusvr.OVR;
-import com.jogamp.oculusvr.OVRException;
-import com.jogamp.oculusvr.OvrHmdContext;
-import com.jogamp.oculusvr.ovrDistortionMesh;
-import com.jogamp.oculusvr.ovrDistortionVertex;
-import com.jogamp.oculusvr.ovrEyeRenderDesc;
-import com.jogamp.oculusvr.ovrFovPort;
-import com.jogamp.oculusvr.ovrMatrix4f;
-import com.jogamp.oculusvr.ovrPosef;
-import com.jogamp.oculusvr.ovrRecti;
-import com.jogamp.oculusvr.ovrSizei;
-import com.jogamp.oculusvr.ovrVector2f;
-import com.jogamp.oculusvr.ovrVector3f;
-import com.jogamp.opengl.JoglVersion;
-import com.jogamp.opengl.math.FloatUtil;
-import com.jogamp.opengl.math.Quaternion;
-import com.jogamp.opengl.math.VectorUtil;
-import com.jogamp.opengl.util.CustomRendererListener;
-import com.jogamp.opengl.util.GLArrayDataServer;
-import com.jogamp.opengl.util.glsl.ShaderCode;
-import com.jogamp.opengl.util.glsl.ShaderProgram;
-import com.jogamp.opengl.util.stereo.EyeParameter;
-import com.jogamp.opengl.util.stereo.EyePose;
-
-/**
- * OculusVR Distortion Data and OpenGL Renderer Utility
- */
-public class OVRDistortion {
- public static final float[] VEC3_UP = { 0f, 1f, 0f };
- public static final float[] VEC3_FORWARD = { 0f, 0f, -1f };
-
- private static final String shaderPrefix01 = "dist01";
- private static final String shaderTimewarpSuffix = "_timewarp";
- private static final String shaderChromaSuffix = "_chroma";
- private static final String shaderPlainSuffix = "_plain";
-
- public static boolean useTimewarp(final int distortionCaps) { return 0 != ( distortionCaps & OVR.ovrDistortionCap_TimeWarp ) ; }
- public static boolean useChromatic(final int distortionCaps) { return 0 != ( distortionCaps & OVR.ovrDistortionCap_Chromatic ) ; }
- public static boolean useVignette(final int distortionCaps) { return 0 != ( distortionCaps & OVR.ovrDistortionCap_Vignette ) ; }
-
- public static class EyeData {
- public final int eyeName;
- public final int distortionCaps;
- public final int vertexCount;
- public final int indexCount;
- public final int[/*4*/] viewport;
-
- public final GLUniformData eyeToSourceUVScale;
- public final GLUniformData eyeToSourceUVOffset;
- public final GLUniformData eyeRotationStart;
- public final GLUniformData eyeRotationEnd;
-
- /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
- public final GLArrayDataServer iVBO;
- public final GLArrayData vboPos, vboParams, vboTexCoordsR, vboTexCoordsG, vboTexCoordsB;
- public final GLArrayDataServer indices;
-
- public final ovrEyeRenderDesc ovrEyeDesc;
- public final ovrFovPort ovrEyeFov;
- public final EyeParameter eyeParameter;
-
- public ovrPosef ovrEyePose;
- public EyePose eyePose;
-
- public final boolean useTimewarp() { return OVRDistortion.useTimewarp(distortionCaps); }
- public final boolean useChromatic() { return OVRDistortion.useChromatic(distortionCaps); }
- public final boolean useVignette() { return OVRDistortion.useVignette(distortionCaps); }
-
- private EyeData(final OvrHmdContext hmdCtx, final int distortionCaps,
- final float[] eyePositionOffset, final ovrEyeRenderDesc eyeDesc,
- final ovrSizei ovrTextureSize, final int[] eyeRenderViewport) {
- this.eyeName = eyeDesc.getEye();
- this.distortionCaps = distortionCaps;
- viewport = new int[4];
- System.arraycopy(eyeRenderViewport, 0, viewport, 0, 4);
-
- final FloatBuffer fstash = Buffers.newDirectFloatBuffer(2+2+16+26);
-
- eyeToSourceUVScale = new GLUniformData("ovr_EyeToSourceUVScale", 2, Buffers.slice2Float(fstash, 0, 2));
- eyeToSourceUVOffset = new GLUniformData("ovr_EyeToSourceUVOffset", 2, Buffers.slice2Float(fstash, 2, 2));
-
- if( useTimewarp() ) {
- eyeRotationStart = new GLUniformData("ovr_EyeRotationStart", 4, 4, Buffers.slice2Float(fstash, 4, 16));
- eyeRotationEnd = new GLUniformData("ovr_EyeRotationEnd", 4, 4, Buffers.slice2Float(fstash, 20, 16));
- } else {
- eyeRotationStart = null;
- eyeRotationEnd = null;
- }
-
- this.ovrEyeDesc = eyeDesc;
- this.ovrEyeFov = eyeDesc.getFov();
-
- final ovrVector3f eyeViewAdjust = eyeDesc.getViewAdjust();
- this.eyeParameter = new EyeParameter(eyeName, eyePositionOffset, OVRUtil.getFovHV(ovrEyeFov),
- eyeViewAdjust.getX(), eyeViewAdjust.getY(), eyeViewAdjust.getZ());
-
- this.eyePose = new EyePose(eyeName);
-
- updateEyePose(hmdCtx);
-
- final ovrDistortionMesh meshData = ovrDistortionMesh.create();
- final ovrFovPort fov = eyeDesc.getFov();
-
- if( !OVR.ovrHmd_CreateDistortionMesh(hmdCtx, eyeName, fov, distortionCaps, meshData) ) {
- throw new OVRException("Failed to create meshData for eye "+eyeName+" and "+OVRUtil.toString(fov));
- }
- vertexCount = meshData.getVertexCount();
- indexCount = meshData.getIndexCount();
-
- /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
- final boolean useChromatic = useChromatic();
- final boolean useVignette = useVignette();
- final int compsPerElement = 2+2+2+( useChromatic ? 2+2 /* texCoordG + texCoordB */: 0 );
- iVBO = GLArrayDataServer.createGLSLInterleaved(compsPerElement, GL.GL_FLOAT, false, vertexCount, GL.GL_STATIC_DRAW);
- vboPos = iVBO.addGLSLSubArray("ovr_Position", 2, GL.GL_ARRAY_BUFFER);
- vboParams = iVBO.addGLSLSubArray("ovr_Params", 2, GL.GL_ARRAY_BUFFER);
- vboTexCoordsR = iVBO.addGLSLSubArray("ovr_TexCoordR", 2, GL.GL_ARRAY_BUFFER);
- if( useChromatic ) {
- vboTexCoordsG = iVBO.addGLSLSubArray("ovr_TexCoordG", 2, GL.GL_ARRAY_BUFFER);
- vboTexCoordsB = iVBO.addGLSLSubArray("ovr_TexCoordB", 2, GL.GL_ARRAY_BUFFER);
- } else {
- vboTexCoordsG = null;
- vboTexCoordsB = null;
- }
- indices = GLArrayDataServer.createData(1, GL.GL_SHORT, indexCount, GL.GL_STATIC_DRAW, GL.GL_ELEMENT_ARRAY_BUFFER);
-
- // Setup: eyeToSourceUVScale, eyeToSourceUVOffset
- {
- final ovrVector2f[] uvScaleOffsetOut = new ovrVector2f[2];
- uvScaleOffsetOut[0] = ovrVector2f.create(); // FIXME: remove ctor / double check
- uvScaleOffsetOut[1] = ovrVector2f.create();
-
- final ovrRecti ovrEyeRenderViewport = OVRUtil.createOVRRecti(eyeRenderViewport);
- OVR.ovrHmd_GetRenderScaleAndOffset(fov, ovrTextureSize, ovrEyeRenderViewport, uvScaleOffsetOut);
- if( OVRUtil.DEBUG ) {
- System.err.println("XXX."+eyeName+": fov "+OVRUtil.toString(fov));
- System.err.println("XXX."+eyeName+": uvScale "+OVRUtil.toString(uvScaleOffsetOut[0]));
- System.err.println("XXX."+eyeName+": uvOffset "+OVRUtil.toString(uvScaleOffsetOut[0]));
- System.err.println("XXX."+eyeName+": textureSize "+OVRUtil.toString(ovrTextureSize));
- System.err.println("XXX."+eyeName+": viewport "+OVRUtil.toString(ovrEyeRenderViewport));
- }
- final FloatBuffer eyeToSourceUVScaleFB = eyeToSourceUVScale.floatBufferValue();
- eyeToSourceUVScaleFB.put(0, uvScaleOffsetOut[0].getX());
- eyeToSourceUVScaleFB.put(1, uvScaleOffsetOut[0].getY());
- final FloatBuffer eyeToSourceUVOffsetFB = eyeToSourceUVOffset.floatBufferValue();
- eyeToSourceUVOffsetFB.put(0, uvScaleOffsetOut[1].getX());
- eyeToSourceUVOffsetFB.put(1, uvScaleOffsetOut[1].getY());
- }
-
- /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
- final FloatBuffer iVBOFB = (FloatBuffer)iVBO.getBuffer();
- final ovrDistortionVertex[] ovRes = new ovrDistortionVertex[1];
- ovRes[0] = ovrDistortionVertex.create(); // FIXME: remove ctor / double check
-
- for ( int vertNum = 0; vertNum < vertexCount; vertNum++ ) {
- final ovrDistortionVertex ov = meshData.getPVertexData(vertNum, ovRes)[0];
- ovrVector2f v;
-
- // pos
- v = ov.getPos();
- iVBOFB.put(v.getX());
- iVBOFB.put(v.getY());
-
- // params
- if( useVignette ) {
- iVBOFB.put(ov.getVignetteFactor());
- } else {
- iVBOFB.put(1.0f);
- }
- iVBOFB.put(ov.getTimeWarpFactor());
-
- // texCoordR
- v = ov.getTexR();
- iVBOFB.put(v.getX());
- iVBOFB.put(v.getY());
-
- if( useChromatic ) {
- // texCoordG
- v = ov.getTexG();
- iVBOFB.put(v.getX());
- iVBOFB.put(v.getY());
-
- // texCoordB
- v = ov.getTexB();
- iVBOFB.put(v.getX());
- iVBOFB.put(v.getY());
- }
- }
- if( OVRUtil.DEBUG ) {
- System.err.println("XXX."+eyeName+": iVBO "+iVBO);
- }
- {
- final ShortBuffer in = meshData.getPIndexData();
- final ShortBuffer out = (ShortBuffer) indices.getBuffer();
- out.put(in);
- }
- if( OVRUtil.DEBUG ) {
- System.err.println("XXX."+eyeName+": idx "+indices);
- System.err.println("XXX."+eyeName+": distEye "+this);
- }
- OVR.ovrHmd_DestroyDistortionMesh(meshData);
- }
-
- private void linkData(final GL2ES2 gl, final ShaderProgram sp) {
- if( 0 > vboPos.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboPos);
- }
- if( 0 > vboParams.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboParams);
- }
- if( 0 > vboTexCoordsR.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboTexCoordsR);
- }
- if( useChromatic() ) {
- if( 0 > vboTexCoordsG.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboTexCoordsG);
- }
- if( 0 > vboTexCoordsB.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboTexCoordsB);
- }
- }
- if( 0 > eyeToSourceUVScale.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+eyeToSourceUVScale);
- }
- if( 0 > eyeToSourceUVOffset.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+eyeToSourceUVOffset);
- }
- if( useTimewarp() ) {
- if( 0 > eyeRotationStart.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+eyeRotationStart);
- }
- if( 0 > eyeRotationEnd.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+eyeRotationEnd);
- }
- }
- iVBO.seal(gl, true);
- iVBO.enableBuffer(gl, false);
- indices.seal(gl, true);
- indices.enableBuffer(gl, false);
- }
-
- public void dispose(final GL2ES2 gl) {
- iVBO.destroy(gl);
- indices.destroy(gl);
- }
- public void enableVBO(final GL2ES2 gl, final boolean enable) {
- iVBO.enableBuffer(gl, enable);
- indices.bindBuffer(gl, enable); // keeps VBO binding if enable:=true
- }
-
- public void updateUniform(final GL2ES2 gl, final ShaderProgram sp) {
- gl.glUniform(eyeToSourceUVScale);
- gl.glUniform(eyeToSourceUVOffset);
- if( useTimewarp() ) {
- gl.glUniform(eyeRotationStart);
- gl.glUniform(eyeRotationEnd);
- }
- }
-
- public void updateTimewarp(final OvrHmdContext hmdCtx, final ovrPosef eyeRenderPose, final float[] mat4Tmp1, final float[] mat4Tmp2) {
- final ovrMatrix4f[] timeWarpMatrices = new ovrMatrix4f[2];
- timeWarpMatrices[0] = ovrMatrix4f.create(); // FIXME: remove ctor / double check
- timeWarpMatrices[1] = ovrMatrix4f.create();
- OVR.ovrHmd_GetEyeTimewarpMatrices(hmdCtx, eyeName, eyeRenderPose, timeWarpMatrices);
-
- final float[] eyeRotationStartM = FloatUtil.transposeMatrix(timeWarpMatrices[0].getM(0, mat4Tmp1), mat4Tmp2);
- final FloatBuffer eyeRotationStartU = eyeRotationStart.floatBufferValue();
- eyeRotationStartU.put(eyeRotationStartM);
- eyeRotationStartU.rewind();
-
- final float[] eyeRotationEndM = FloatUtil.transposeMatrix(timeWarpMatrices[1].getM(0, mat4Tmp1), mat4Tmp2);
- final FloatBuffer eyeRotationEndU = eyeRotationEnd.floatBufferValue();
- eyeRotationEndU.put(eyeRotationEndM);
- eyeRotationEndU.rewind();
- }
-
- /**
- * Updates {@link #ovrEyePose} and it's extracted
- * {@link #eyeRenderPoseOrientation} and {@link #eyeRenderPosePosition}.
- * @param hmdCtx used get the {@link #ovrEyePose} via {@link OVR#ovrHmd_GetEyePose(OvrHmdContext, int)}
- */
- public EyePose updateEyePose(final OvrHmdContext hmdCtx) {
- ovrEyePose = OVR.ovrHmd_GetEyePose(hmdCtx, eyeName);
- final ovrVector3f pos = ovrEyePose.getPosition();
- eyePose.setPosition(pos.getX(), pos.getY(), pos.getZ());
- OVRUtil.copyToQuaternion(ovrEyePose.getOrientation(), eyePose.orientation);
- return eyePose;
- }
-
- @Override
- public String toString() {
- return "Eye["+eyeName+", viewport "+viewport[0]+"/"+viewport[1]+" "+viewport[2]+"x"+viewport[3]+
- ", "+eyeParameter+
- ", vertices "+vertexCount+", indices "+indexCount+
- ", uvScale["+eyeToSourceUVScale.floatBufferValue().get(0)+", "+eyeToSourceUVScale.floatBufferValue().get(1)+
- "], uvOffset["+eyeToSourceUVOffset.floatBufferValue().get(0)+", "+eyeToSourceUVOffset.floatBufferValue().get(1)+
- "], desc"+OVRUtil.toString(ovrEyeDesc)+", "+eyePose+"]";
- }
- }
-
- public final OvrHmdContext hmdCtx;
- public final EyeData[] eyes;
- public final int distortionCaps;
- public final int[/*2*/] textureSize;
- public final GLUniformData texUnit0;
- public final boolean usesDistMesh;
-
- private final float[] mat4Tmp1 = new float[16];
- private final float[] mat4Tmp2 = new float[16];
-
- private ShaderProgram sp;
-
- @Override
- public String toString() {
- return "OVRDist[caps 0x"+Integer.toHexString(distortionCaps)+", "+
- ", tex "+textureSize[0]+"x"+textureSize[1]+
- ", vignette "+useVignette()+", chromatic "+useChromatic()+", timewarp "+useTimewarp()+
- ", "+PlatformPropsImpl.NEWLINE+" "+eyes[0]+", "+PlatformPropsImpl.NEWLINE+" "+eyes[1]+"]";
- }
-
- public static OVRDistortion create(final OvrHmdContext hmdCtx, final boolean sbsSingleTexture,
- final float[] eyePositionOffset, final ovrFovPort[] eyeFov,
- final float pixelsPerDisplayPixel, final int distortionCaps) {
- final ovrEyeRenderDesc[] eyeRenderDesc = new ovrEyeRenderDesc[2];
- eyeRenderDesc[0] = OVR.ovrHmd_GetRenderDesc(hmdCtx, OVR.ovrEye_Left, eyeFov[0]);
- eyeRenderDesc[1] = OVR.ovrHmd_GetRenderDesc(hmdCtx, OVR.ovrEye_Right, eyeFov[1]);
- if( OVRUtil.DEBUG ) {
- System.err.println("XXX: eyeRenderDesc[0] "+OVRUtil.toString(eyeRenderDesc[0]));
- System.err.println("XXX: eyeRenderDesc[1] "+OVRUtil.toString(eyeRenderDesc[1]));
- }
-
- final ovrSizei recommenedTex0Size = OVR.ovrHmd_GetFovTextureSize(hmdCtx, OVR.ovrEye_Left, eyeRenderDesc[0].getFov(), pixelsPerDisplayPixel);
- final ovrSizei recommenedTex1Size = OVR.ovrHmd_GetFovTextureSize(hmdCtx, OVR.ovrEye_Right, eyeRenderDesc[1].getFov(), pixelsPerDisplayPixel);
- if( OVRUtil.DEBUG ) {
- System.err.println("XXX: recommenedTex0Size "+OVRUtil.toString(recommenedTex0Size));
- System.err.println("XXX: recommenedTex1Size "+OVRUtil.toString(recommenedTex1Size));
- }
- final int[] textureSize = new int[2];
- if( sbsSingleTexture ) {
- textureSize[0] = recommenedTex0Size.getW() + recommenedTex1Size.getW();
- } else {
- textureSize[0] = Math.max(recommenedTex0Size.getW(), recommenedTex1Size.getW());
- }
- textureSize[1] = Math.max(recommenedTex0Size.getH(), recommenedTex1Size.getH());
- if( OVRUtil.DEBUG ) {
- System.err.println("XXX: textureSize "+textureSize[0]+"x"+textureSize[1]);
- }
-
- final int[][] eyeRenderViewports = new int[2][4];
- if( sbsSingleTexture ) {
- eyeRenderViewports[0][0] = 0;
- eyeRenderViewports[0][1] = 0;
- eyeRenderViewports[0][2] = textureSize[0] / 2;
- eyeRenderViewports[0][3] = textureSize[1];
- eyeRenderViewports[1][0] = (textureSize[0] + 1) / 2;
- eyeRenderViewports[1][1] = 0;
- eyeRenderViewports[1][2] = textureSize[0] / 2;
- eyeRenderViewports[1][3] = textureSize[1];
- } else {
- eyeRenderViewports[0][0] = 0;
- eyeRenderViewports[0][1] = 0;
- eyeRenderViewports[0][2] = textureSize[0];
- eyeRenderViewports[0][3] = textureSize[1];
- eyeRenderViewports[1][0] = 0;
- eyeRenderViewports[1][1] = 0;
- eyeRenderViewports[1][2] = textureSize[0];
- eyeRenderViewports[1][3] = textureSize[1];
- }
- return new OVRDistortion(hmdCtx, sbsSingleTexture, eyePositionOffset, eyeRenderDesc, textureSize, eyeRenderViewports, distortionCaps, 0);
- }
-
- public OVRDistortion(final OvrHmdContext hmdCtx, final boolean sbsSingleTexture,
- final float[] eyePositionOffset, final ovrEyeRenderDesc[] eyeRenderDescs,
- final int[] textureSize, final int[][] eyeRenderViewports,
- final int distortionCaps, final int textureUnit) {
- this.hmdCtx = hmdCtx;
- this.eyes = new EyeData[2];
- this.distortionCaps = distortionCaps;
- this.textureSize = new int[2];
- System.arraycopy(textureSize, 0, this.textureSize, 0, 2);
-
- texUnit0 = new GLUniformData("ovr_Texture0", textureUnit);
- usesDistMesh = true;
-
- final ovrSizei ovrTextureSize = OVRUtil.createOVRSizei(textureSize);
- eyes[0] = new EyeData(hmdCtx, distortionCaps, eyePositionOffset, eyeRenderDescs[0], ovrTextureSize, eyeRenderViewports[0]);
- eyes[1] = new EyeData(hmdCtx, distortionCaps, eyePositionOffset, eyeRenderDescs[1], ovrTextureSize, eyeRenderViewports[1]);
- sp = null;
- }
-
- public final boolean useTimewarp() { return useTimewarp(distortionCaps); }
- public final boolean useChromatic() { return useChromatic(distortionCaps); }
- public final boolean useVignette() { return useVignette(distortionCaps); }
-
- public void updateTimewarp(final ovrPosef eyeRenderPose, final int eyeNum) {
- eyes[eyeNum].updateTimewarp(hmdCtx, eyeRenderPose, mat4Tmp1, mat4Tmp2);
- }
- public void updateTimewarp(final ovrPosef[] eyeRenderPoses) {
- eyes[0].updateTimewarp(hmdCtx, eyeRenderPoses[0], mat4Tmp1, mat4Tmp2);
- eyes[1].updateTimewarp(hmdCtx, eyeRenderPoses[1], mat4Tmp1, mat4Tmp2);
- }
-
- public void enableVBO(final GL2ES2 gl, final boolean enable, final int eyeNum) {
- if( null == sp ) {
- throw new IllegalStateException("Not initialized");
- }
- eyes[eyeNum].enableVBO(gl, enable);
- }
-
- public final ShaderProgram getShaderProgram() { return sp; }
-
- public void init(final GL2ES2 gl) {
- if( OVRUtil.DEBUG ) {
- System.err.println(JoglVersion.getGLInfo(gl, null).toString());
- }
- if( null != sp ) {
- throw new IllegalStateException("Already initialized");
- }
- final String vertexShaderBasename;
- final String fragmentShaderBasename;
- {
- final StringBuilder sb = new StringBuilder();
- sb.append(shaderPrefix01);
- if( !useChromatic() && !useTimewarp() ) {
- sb.append(shaderPlainSuffix);
- } else if( useChromatic() && !useTimewarp() ) {
- sb.append(shaderChromaSuffix);
- } else if( useTimewarp() ) {
- sb.append(shaderTimewarpSuffix);
- if( useChromatic() ) {
- sb.append(shaderChromaSuffix);
- }
- }
- vertexShaderBasename = sb.toString();
- sb.setLength(0);
- sb.append(shaderPrefix01);
- if( useChromatic() ) {
- sb.append(shaderChromaSuffix);
- } else {
- sb.append(shaderPlainSuffix);
- }
- fragmentShaderBasename = sb.toString();
- }
- final ShaderCode vp0 = ShaderCode.create(gl, GL2ES2.GL_VERTEX_SHADER, OVRDistortion.class, "shader",
- "shader/bin", vertexShaderBasename, true);
- final ShaderCode fp0 = ShaderCode.create(gl, GL2ES2.GL_FRAGMENT_SHADER, OVRDistortion.class, "shader",
- "shader/bin", fragmentShaderBasename, true);
- vp0.defaultShaderCustomization(gl, true, true);
- fp0.defaultShaderCustomization(gl, true, true);
-
- sp = new ShaderProgram();
- sp.add(gl, vp0, System.err);
- sp.add(gl, fp0, System.err);
- if(!sp.link(gl, System.err)) {
- throw new GLException("could not link program: "+sp);
- }
- sp.useProgram(gl, true);
- if( 0 > texUnit0.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+texUnit0);
- }
- eyes[0].linkData(gl, sp);
- eyes[1].linkData(gl, sp);
- sp.useProgram(gl, false);
- }
-
- public void dispose(final GL2ES2 gl) {
- sp.useProgram(gl, false);
- eyes[0].dispose(gl);
- eyes[1].dispose(gl);
- sp.destroy(gl);
- }
-
- public EyeParameter getEyeParam(final int eyeNum) {
- return eyes[eyeNum].eyeParameter;
- }
-
- /**
- * Updates the {@link EyeData#ovrEyePose} via {@link EyeData#updateEyePose(OvrHmdContext)}
- * for the denoted eye.
- */
- public EyePose updateEyePose(final int eyeNum) {
- return eyes[eyeNum].updateEyePose(hmdCtx);
- }
-
- public void updateUniforms(final GL2ES2 gl, final int eyeNum) {
- if( null == sp ) {
- throw new IllegalStateException("Not initialized");
- }
- gl.glUniform(texUnit0);
- eyes[eyeNum].updateUniform(gl, sp);
- }
-
- /**
- * <p>
- * {@link #updateEyePose(int)} must be called upfront
- * when rendering upstream {@link GLEventListener}.
- * </p>
- *
- * @param gl
- * @param timewarpPointSeconds
- */
- public void display(final GL2ES2 gl, final double timewarpPointSeconds) {
- if( null == sp ) {
- throw new IllegalStateException("Not initialized");
- }
- if( useTimewarp() ) {
- OVR.ovr_WaitTillTime(timewarpPointSeconds);
- }
- gl.glDisable(GL.GL_CULL_FACE);
- gl.glDisable(GL.GL_DEPTH_TEST);
- gl.glDisable(GL.GL_BLEND);
-
- if( !gl.isGLcore() ) {
- gl.glEnable(GL.GL_TEXTURE_2D);
- }
-
- sp.useProgram(gl, true);
-
- gl.glUniform(texUnit0);
-
- for(int eyeNum=0; eyeNum<2; eyeNum++) {
- final EyeData eye = eyes[eyeNum];
- if( useTimewarp() ) {
- eye.updateTimewarp(hmdCtx, eye.ovrEyePose, mat4Tmp1, mat4Tmp2);
- }
- eye.updateUniform(gl, sp);
- eye.enableVBO(gl, true);
- if( usesDistMesh ) {
- gl.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0);
- } else {
- gl.glDrawArrays(GL.GL_TRIANGLE_STRIP, 0, eye.vertexCount);
- }
- eyes[eyeNum].enableVBO(gl, false);
- }
-
- sp.useProgram(gl, false);
- }
-
- /**
- *
- * @param gl
- * @param timewarpPointSeconds
- */
- public void displayOneEyePre(final GL2ES2 gl, final double timewarpPointSeconds) {
- if( null == sp ) {
- throw new IllegalStateException("Not initialized");
- }
- if( useTimewarp() ) {
- OVR.ovr_WaitTillTime(timewarpPointSeconds);
- }
- gl.glDisable(GL.GL_CULL_FACE);
- gl.glDisable(GL.GL_DEPTH_TEST);
- gl.glDisable(GL.GL_BLEND);
-
- if( !gl.isGLcore() ) {
- gl.glEnable(GL.GL_TEXTURE_2D);
- }
-
- sp.useProgram(gl, true);
-
- gl.glUniform(texUnit0);
- }
-
- /**
- * <p>
- * {@link #updateEyePose(int)} must be called upfront
- * when rendering upstream {@link GLEventListener}.
- * </p>
- *
- * @param gl
- * @param eyeNum
- */
- public void displayOneEye(final GL2ES2 gl, final int eyeNum) {
- if( null == sp ) {
- throw new IllegalStateException("Not initialized");
- }
- final EyeData eye = eyes[eyeNum];
- if( useTimewarp() ) {
- eye.updateTimewarp(hmdCtx, eye.ovrEyePose, mat4Tmp1, mat4Tmp2);
- }
- eye.updateUniform(gl, sp);
- eye.enableVBO(gl, true);
- if( usesDistMesh ) {
- gl.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0);
- } else {
- gl.glDrawArrays(GL.GL_TRIANGLE_STRIP, 0, eye.vertexCount);
- }
- eyes[eyeNum].enableVBO(gl, false);
- }
-
- public void displayOneEyePost(final GL2ES2 gl) {
- sp.useProgram(gl, false);
- }
-
- /**
- * Calculates the <i>Side By Side</i>, SBS, projection- and modelview matrix for one eye.
- * <p>
- * {@link #updateEyePose(int)} must be called upfront.
- * </p>
- * <p>
- * This method merely exist as an example implementation to compute the matrices,
- * which shall be adopted by the
- * {@link CustomRendererListener#reshape(javax.media.opengl.GLAutoDrawable, int, int, int, int, EyeParameter, EyePose) upstream client code}.
- * </p>
- * @param eyeNum eye denominator
- * @param zNear frustum near value
- * @param zFar frustum far value
- * @param mat4Projection float[16] projection matrix result
- * @param mat4Modelview float[16] modelview matrix result
- * @deprecated Only an example implementation, which should be adopted by the {@link CustomRendererListener#reshape(javax.media.opengl.GLAutoDrawable, int, int, int, int, EyeParameter, EyePose) upstream client code}.
- */
- public void getSBSUpstreamPMV(final int eyeNum, final float zNear, final float zFar,
- final float[] mat4Projection, final float[] mat4Modelview) {
- final EyeData eyeDist = eyes[eyeNum];
-
- final float[] vec3Tmp1 = new float[3];
- final float[] vec3Tmp2 = new float[3];
- final float[] vec3Tmp3 = new float[3];
-
- //
- // Projection
- //
- FloatUtil.makePerspective(mat4Projection, 0, true, eyeDist.eyeParameter.fovhv, zNear, zFar);
-
- //
- // Modelview
- //
- final Quaternion rollPitchYaw = new Quaternion();
- // private final float eyeYaw = FloatUtil.PI; // 180 degrees in radians
- // rollPitchYaw.rotateByAngleY(eyeYaw);
- final float[] shiftedEyePos = rollPitchYaw.rotateVector(vec3Tmp1, 0, eyeDist.eyePose.position, 0);
- VectorUtil.addVec3(shiftedEyePos, shiftedEyePos, eyeDist.eyeParameter.positionOffset);
-
- rollPitchYaw.mult(eyeDist.eyePose.orientation);
- final float[] up = rollPitchYaw.rotateVector(vec3Tmp2, 0, VEC3_UP, 0);
- final float[] forward = rollPitchYaw.rotateVector(vec3Tmp3, 0, VEC3_FORWARD, 0);
- final float[] center = VectorUtil.addVec3(forward, shiftedEyePos, forward);
-
- final float[] mLookAt = FloatUtil.makeLookAt(mat4Tmp2, 0, shiftedEyePos, 0, center, 0, up, 0, mat4Tmp1);
- final float[] mViewAdjust = FloatUtil.makeTranslation(mat4Modelview, true,
- eyeDist.eyeParameter.distNoseToPupilX,
- eyeDist.eyeParameter.distMiddleToPupilY,
- eyeDist.eyeParameter.eyeReliefZ);
-
- /* mat4Modelview = */ FloatUtil.multMatrix(mViewAdjust, mLookAt);
- }
-}
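Note that the removed getSBSUpstreamPMV(..) above is documented as an example to be adopted by the upstream client code. Transcribed as a standalone, hypothetical client-side sketch that depends only on the vendor-agnostic EyeParameter and EyePose types, the same computation could look as follows; the class and method names here are illustrative and not part of this commit.

    import com.jogamp.opengl.math.FloatUtil;
    import com.jogamp.opengl.math.Quaternion;
    import com.jogamp.opengl.math.VectorUtil;
    import com.jogamp.opengl.util.stereo.EyeParameter;
    import com.jogamp.opengl.util.stereo.EyePose;

    public class EyePMVSketch {
        private static final float[] VEC3_UP      = { 0f, 1f, 0f };
        private static final float[] VEC3_FORWARD = { 0f, 0f, -1f };

        /** Per-eye projection and modelview matrices, following the removed getSBSUpstreamPMV(..) example. */
        public static void computePMV(final EyeParameter eyeParam, final EyePose eyePose,
                                      final float zNear, final float zFar,
                                      final float[] mat4Projection, final float[] mat4Modelview) {
            final float[] vec3Tmp1 = new float[3];
            final float[] vec3Tmp2 = new float[3];
            final float[] vec3Tmp3 = new float[3];
            final float[] mat4Tmp1 = new float[16];
            final float[] mat4Tmp2 = new float[16];

            // Projection from the eye's asymmetric FOV halves
            FloatUtil.makePerspective(mat4Projection, 0, true, eyeParam.fovhv, zNear, zFar);

            // Modelview: shift the tracked eye position by the static position offset ...
            final Quaternion rollPitchYaw = new Quaternion();
            final float[] shiftedEyePos = rollPitchYaw.rotateVector(vec3Tmp1, 0, eyePose.position, 0);
            VectorUtil.addVec3(shiftedEyePos, shiftedEyePos, eyeParam.positionOffset);

            // ... and look along the tracked orientation
            rollPitchYaw.mult(eyePose.orientation);
            final float[] up      = rollPitchYaw.rotateVector(vec3Tmp2, 0, VEC3_UP, 0);
            final float[] forward = rollPitchYaw.rotateVector(vec3Tmp3, 0, VEC3_FORWARD, 0);
            final float[] center  = VectorUtil.addVec3(forward, shiftedEyePos, forward);

            final float[] mLookAt = FloatUtil.makeLookAt(mat4Tmp2, 0, shiftedEyePos, 0, center, 0, up, 0, mat4Tmp1);
            final float[] mViewAdjust = FloatUtil.makeTranslation(mat4Modelview, true,
                    eyeParam.distNoseToPupilX, eyeParam.distMiddleToPupilY, eyeParam.eyeReliefZ);

            /* mat4Modelview = */ FloatUtil.multMatrix(mViewAdjust, mLookAt);
        }
    }

An upstream listener would typically call such a helper once per eye from its per-eye reshape callback, using the EyeParameter and the current EyePose it is handed, and feed the two matrices into its own PMV state.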
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java
new file mode 100644
index 000000000..09a348c46
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java
@@ -0,0 +1,158 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.oculusvr;
+
+import javax.media.nativewindow.util.Dimension;
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.PointImmutable;
+import javax.media.nativewindow.util.Rectangle;
+import javax.media.nativewindow.util.RectangleImmutable;
+
+import com.jogamp.oculusvr.OVR;
+import com.jogamp.oculusvr.OvrHmdContext;
+import com.jogamp.oculusvr.ovrEyeRenderDesc;
+import com.jogamp.oculusvr.ovrFovPort;
+import com.jogamp.oculusvr.ovrHmdDesc;
+import com.jogamp.oculusvr.ovrSizei;
+import com.jogamp.opengl.math.FovHVHalves;
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
+
+public class OVRStereoDevice implements StereoDevice {
+ public final OvrHmdContext handle;
+ public final int deviceIndex;
+ public final ovrHmdDesc hmdDesc;
+
+ private boolean sensorsStarted = false;
+
+ public OVRStereoDevice(final OvrHmdContext nativeContext, final int deviceIndex) {
+ this.handle = nativeContext;
+ this.deviceIndex = deviceIndex;
+ this.hmdDesc = ovrHmdDesc.create();
+ OVR.ovrHmd_GetDesc(handle, hmdDesc);
+ }
+
+ @Override
+ public final void dispose() {
+ // NOP
+ }
+
+ @Override
+ public final PointImmutable getPosition() {
+ return OVRUtil.getVec2iAsPoint(hmdDesc.getWindowsPos());
+ }
+
+ @Override
+ public final DimensionImmutable getSurfaceSize() {
+ return OVRUtil.getOVRSizei(hmdDesc.getResolution());
+ }
+
+ @Override
+ public final FovHVHalves[] getDefaultFOV() {
+ final ovrFovPort[] defaultEyeFov = hmdDesc.getDefaultEyeFov(0, new ovrFovPort[2]);
+ final FovHVHalves[] eyeFov = new FovHVHalves[2];
+ eyeFov[0] = OVRUtil.getFovHV(defaultEyeFov[0]);
+ eyeFov[1] = OVRUtil.getFovHV(defaultEyeFov[1]);
+ return eyeFov;
+ }
+
+ @Override
+ public final boolean startSensors(final boolean start) {
+ if( start && !sensorsStarted ) {
+ // Start the sensor which provides the Rift’s pose and motion.
+ final int requiredSensorCaps = 0;
+ final int supportedSensorCaps = requiredSensorCaps | OVR.ovrSensorCap_Orientation | OVR.ovrSensorCap_YawCorrection | OVR.ovrSensorCap_Position;
+ if( OVR.ovrHmd_StartSensor(handle, supportedSensorCaps, requiredSensorCaps) ) {
+ sensorsStarted = true;
+ return true;
+ } else {
+ sensorsStarted = false;
+ return false;
+ }
+ } else if( sensorsStarted ) {
+ OVR.ovrHmd_StopSensor(handle);
+ sensorsStarted = false;
+ return true;
+ } else {
+ // No state change -> Success
+ return true;
+ }
+ }
+ @Override
+ public boolean getSensorsStarted() { return sensorsStarted; }
+
+ @Override
+ public final StereoDeviceRenderer createRenderer(final int distortionBits,
+ final int textureCount, final float[] eyePositionOffset,
+ final FovHVHalves[] eyeFov, final float pixelsPerDisplayPixel, final int textureUnit) {
+ final ovrFovPort ovrEyeFov0 = OVRUtil.getOVRFovPort(eyeFov[0]);
+ final ovrFovPort ovrEyeFov1 = OVRUtil.getOVRFovPort(eyeFov[1]);
+
+ final ovrEyeRenderDesc[] eyeRenderDesc = new ovrEyeRenderDesc[2];
+ eyeRenderDesc[0] = OVR.ovrHmd_GetRenderDesc(handle, OVR.ovrEye_Left, ovrEyeFov0);
+ eyeRenderDesc[1] = OVR.ovrHmd_GetRenderDesc(handle, OVR.ovrEye_Right, ovrEyeFov1);
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX: eyeRenderDesc[0] "+OVRUtil.toString(eyeRenderDesc[0]));
+ System.err.println("XXX: eyeRenderDesc[1] "+OVRUtil.toString(eyeRenderDesc[1]));
+ }
+
+ final ovrSizei recommendedTex0Size = OVR.ovrHmd_GetFovTextureSize(handle, OVR.ovrEye_Left, eyeRenderDesc[0].getFov(), pixelsPerDisplayPixel);
+ final ovrSizei recommendedTex1Size = OVR.ovrHmd_GetFovTextureSize(handle, OVR.ovrEye_Right, eyeRenderDesc[1].getFov(), pixelsPerDisplayPixel);
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX: recommendedTex0Size "+OVRUtil.toString(recommendedTex0Size));
+ System.err.println("XXX: recommendedTex1Size "+OVRUtil.toString(recommendedTex1Size));
+ }
+ final int unifiedW = Math.max(recommendedTex0Size.getW(), recommendedTex1Size.getW());
+ final int unifiedH = Math.max(recommendedTex0Size.getH(), recommendedTex1Size.getH());
+
+ final DimensionImmutable singleTextureSize = new Dimension(unifiedW, unifiedH);
+ final DimensionImmutable totalTextureSize = new Dimension(recommendedTex0Size.getW() + recommendedTex1Size.getW(), unifiedH);
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX: textureSize Single "+singleTextureSize);
+ System.err.println("XXX: textureSize Total "+totalTextureSize);
+ }
+
+ final RectangleImmutable[] eyeRenderViewports = new RectangleImmutable[2];
+ if( 1 == textureCount ) { // validated in the OVRStereoDeviceRenderer ctor invoked below
+ eyeRenderViewports[0] = new Rectangle(0, 0,
+ totalTextureSize.getWidth() / 2,
+ totalTextureSize.getHeight());
+
+ eyeRenderViewports[1] = new Rectangle((totalTextureSize.getWidth() + 1) / 2, 0,
+ totalTextureSize.getWidth() / 2,
+ totalTextureSize.getHeight());
+ } else {
+ eyeRenderViewports[0] = new Rectangle(0, 0,
+ singleTextureSize.getWidth(),
+ singleTextureSize.getHeight());
+ eyeRenderViewports[1] = eyeRenderViewports[0];
+ }
+ return new OVRStereoDeviceRenderer(this, distortionBits, textureCount, eyePositionOffset,
+ eyeRenderDesc, singleTextureSize, totalTextureSize, eyeRenderViewports, textureUnit);
+ }
+}
\ No newline at end of file
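For orientation, a minimal, hypothetical sketch of how client code might drive a StereoDevice such as the OVRStereoDevice above; the helper class name and all parameter values (texture count, eye-position offset, pixel density, texture unit) are illustrative assumptions, not values mandated by this commit.

    import com.jogamp.opengl.math.FovHVHalves;
    import com.jogamp.opengl.util.stereo.StereoDevice;
    import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;

    public class StereoDeviceUsageSketch {
        /** Starts tracking and creates a device renderer; parameter values are illustrative only. */
        public static StereoDeviceRenderer createRenderer(final StereoDevice device) {
            device.startSensors(true); // enable head tracking; returns false if the sensors could not be started
            final FovHVHalves[] eyeFov = device.getDefaultFOV();
            return device.createRenderer(StereoDeviceRenderer.DISTORTION_BARREL,
                                         1 /* textureCount: one side-by-side texture */,
                                         new float[] { 0f, 0f, 0f } /* eyePositionOffset */,
                                         eyeFov,
                                         1f /* pixelsPerDisplayPixel */,
                                         0 /* textureUnit */);
        }
    }

With textureCount == 1 the two eye viewports split the total texture side by side, as set up in createRenderer(..) above.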
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java
new file mode 100644
index 000000000..06454e443
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java
@@ -0,0 +1,51 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.oculusvr;
+
+import com.jogamp.oculusvr.OVR;
+import com.jogamp.oculusvr.OVRVersion;
+import com.jogamp.oculusvr.OvrHmdContext;
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceFactory;
+
+public class OVRStereoDeviceFactory extends StereoDeviceFactory {
+
+ public static boolean isAvailable() {
+ return OVR.ovr_Initialize(); // recursive ..
+ }
+
+ @Override
+ public final StereoDevice createDevice(final int deviceIndex, final boolean verbose) {
+ final OvrHmdContext hmdCtx = OVR.ovrHmd_Create(deviceIndex);
+ final OVRStereoDevice ctx = new OVRStereoDevice(hmdCtx, deviceIndex);
+ if( verbose ) {
+ System.err.println(OVRVersion.getAvailableCapabilitiesInfo(ctx.hmdDesc, deviceIndex, null).toString());
+ }
+ return ctx;
+ }
+}
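Likewise, a hypothetical bootstrap sketch for this factory, assuming device index 0 and verbose output; the resulting device could then be used as sketched after OVRStereoDevice.java above.

    import com.jogamp.opengl.util.stereo.StereoDevice;

    import jogamp.opengl.oculusvr.OVRStereoDeviceFactory;

    public class StereoFactoryBootstrapSketch {
        /** Returns the first OculusVR device, or null if the OVR runtime is not available. */
        public static StereoDevice createFirstDevice() {
            if( !OVRStereoDeviceFactory.isAvailable() ) { // calls OVR.ovr_Initialize()
                return null;
            }
            final OVRStereoDeviceFactory factory = new OVRStereoDeviceFactory();
            return factory.createDevice(0 /* deviceIndex */, true /* verbose */);
        }
    }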
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java
new file mode 100644
index 000000000..012ad183d
--- /dev/null
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java
@@ -0,0 +1,590 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.oculusvr;
+
+import java.nio.FloatBuffer;
+import java.nio.ShortBuffer;
+
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.RectangleImmutable;
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES2;
+import javax.media.opengl.GLArrayData;
+import javax.media.opengl.GLException;
+import javax.media.opengl.GLUniformData;
+
+import jogamp.common.os.PlatformPropsImpl;
+
+import com.jogamp.common.nio.Buffers;
+import com.jogamp.oculusvr.OVR;
+import com.jogamp.oculusvr.OVRException;
+import com.jogamp.oculusvr.OvrHmdContext;
+import com.jogamp.oculusvr.ovrDistortionMesh;
+import com.jogamp.oculusvr.ovrDistortionVertex;
+import com.jogamp.oculusvr.ovrEyeRenderDesc;
+import com.jogamp.oculusvr.ovrFovPort;
+import com.jogamp.oculusvr.ovrFrameTiming;
+import com.jogamp.oculusvr.ovrMatrix4f;
+import com.jogamp.oculusvr.ovrPosef;
+import com.jogamp.oculusvr.ovrRecti;
+import com.jogamp.oculusvr.ovrSizei;
+import com.jogamp.oculusvr.ovrVector2f;
+import com.jogamp.oculusvr.ovrVector3f;
+import com.jogamp.opengl.JoglVersion;
+import com.jogamp.opengl.math.FloatUtil;
+import com.jogamp.opengl.util.GLArrayDataServer;
+import com.jogamp.opengl.util.glsl.ShaderCode;
+import com.jogamp.opengl.util.glsl.ShaderProgram;
+import com.jogamp.opengl.util.stereo.EyeParameter;
+import com.jogamp.opengl.util.stereo.EyePose;
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
+import com.jogamp.opengl.util.stereo.StereoUtil;
+
+/**
+ * OculusVR {@link StereoDeviceRenderer} implementation: distortion data and OpenGL post-processing renderer utility.
+ */
+public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
+ private static final String shaderPrefix01 = "dist01";
+ private static final String shaderTimewarpSuffix = "_timewarp";
+ private static final String shaderChromaSuffix = "_chroma";
+ private static final String shaderPlainSuffix = "_plain";
+
+ public static class OVREye implements StereoDeviceRenderer.Eye {
+ private final int eyeName;
+ private final int distortionBits;
+ private final int vertexCount;
+ private final int indexCount;
+ private final RectangleImmutable viewport;
+
+ private final GLUniformData eyeToSourceUVScale;
+ private final GLUniformData eyeToSourceUVOffset;
+ private final GLUniformData eyeRotationStart;
+ private final GLUniformData eyeRotationEnd;
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ private final GLArrayDataServer iVBO;
+ private final GLArrayData vboPos, vboParams, vboTexCoordsR, vboTexCoordsG, vboTexCoordsB;
+ private final GLArrayDataServer indices;
+
+ private final ovrEyeRenderDesc ovrEyeDesc;
+ private final ovrFovPort ovrEyeFov;
+ private final EyeParameter eyeParameter;
+
+ private ovrPosef ovrEyePose;
+ private final EyePose eyePose;
+
+ @Override
+ public final RectangleImmutable getViewport() { return viewport; }
+
+ @Override
+ public final EyeParameter getEyeParameter() { return eyeParameter; }
+
+ @Override
+ public final EyePose getLastEyePose() { return eyePose; }
+
+ private OVREye(final OvrHmdContext hmdCtx, final int distortionBits,
+ final float[] eyePositionOffset, final ovrEyeRenderDesc eyeDesc,
+ final ovrSizei ovrTextureSize, final RectangleImmutable eyeViewport) {
+ this.eyeName = eyeDesc.getEye();
+ this.distortionBits = distortionBits;
+ this.viewport = eyeViewport;
+
+ final boolean usesTimewarp = StereoUtil.usesTimewarpDistortion(distortionBits);
+ final FloatBuffer fstash = Buffers.newDirectFloatBuffer( 2 + 2 + ( usesTimewarp ? 16 + 16 : 0 ) ) ;
+
+ eyeToSourceUVScale = new GLUniformData("ovr_EyeToSourceUVScale", 2, Buffers.slice2Float(fstash, 0, 2));
+ eyeToSourceUVOffset = new GLUniformData("ovr_EyeToSourceUVOffset", 2, Buffers.slice2Float(fstash, 2, 2));
+
+ if( usesTimewarp ) {
+ eyeRotationStart = new GLUniformData("ovr_EyeRotationStart", 4, 4, Buffers.slice2Float(fstash, 4, 16));
+ eyeRotationEnd = new GLUniformData("ovr_EyeRotationEnd", 4, 4, Buffers.slice2Float(fstash, 20, 16));
+ } else {
+ eyeRotationStart = null;
+ eyeRotationEnd = null;
+ }
+
+ this.ovrEyeDesc = eyeDesc;
+ this.ovrEyeFov = eyeDesc.getFov();
+
+ final ovrVector3f eyeViewAdjust = eyeDesc.getViewAdjust();
+ this.eyeParameter = new EyeParameter(eyeName, eyePositionOffset, OVRUtil.getFovHV(ovrEyeFov),
+ eyeViewAdjust.getX(), eyeViewAdjust.getY(), eyeViewAdjust.getZ());
+
+ this.eyePose = new EyePose(eyeName);
+
+ updateEyePose(hmdCtx); // 1st init
+
+ final ovrDistortionMesh meshData = ovrDistortionMesh.create();
+ final ovrFovPort fov = eyeDesc.getFov();
+
+ final int ovrDistortionCaps = distBits2OVRDistCaps(distortionBits);
+ if( !OVR.ovrHmd_CreateDistortionMesh(hmdCtx, eyeName, fov, ovrDistortionCaps, meshData) ) {
+ throw new OVRException("Failed to create meshData for eye "+eyeName+", "+OVRUtil.toString(fov)+" and "+StereoUtil.distortionBitsToString(distortionBits));
+ }
+ vertexCount = meshData.getVertexCount();
+ indexCount = meshData.getIndexCount();
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ final boolean useChromatic = StereoUtil.usesChromaticDistortion(distortionBits);
+ final boolean useVignette = StereoUtil.usesVignetteDistortion(distortionBits);
+
+ final int compsPerElement = 2+2+2+( useChromatic ? 2+2 /* texCoordG + texCoordB */: 0 );
+ iVBO = GLArrayDataServer.createGLSLInterleaved(compsPerElement, GL.GL_FLOAT, false, vertexCount, GL.GL_STATIC_DRAW);
+ vboPos = iVBO.addGLSLSubArray("ovr_Position", 2, GL.GL_ARRAY_BUFFER);
+ vboParams = iVBO.addGLSLSubArray("ovr_Params", 2, GL.GL_ARRAY_BUFFER);
+ vboTexCoordsR = iVBO.addGLSLSubArray("ovr_TexCoordR", 2, GL.GL_ARRAY_BUFFER);
+ if( useChromatic ) {
+ vboTexCoordsG = iVBO.addGLSLSubArray("ovr_TexCoordG", 2, GL.GL_ARRAY_BUFFER);
+ vboTexCoordsB = iVBO.addGLSLSubArray("ovr_TexCoordB", 2, GL.GL_ARRAY_BUFFER);
+ } else {
+ vboTexCoordsG = null;
+ vboTexCoordsB = null;
+ }
+ indices = GLArrayDataServer.createData(1, GL.GL_SHORT, indexCount, GL.GL_STATIC_DRAW, GL.GL_ELEMENT_ARRAY_BUFFER);
+
+ // Setup: eyeToSourceUVScale, eyeToSourceUVOffset
+ {
+ final ovrVector2f[] uvScaleOffsetOut = new ovrVector2f[2];
+ uvScaleOffsetOut[0] = ovrVector2f.create(); // FIXME: remove ctor / double check
+ uvScaleOffsetOut[1] = ovrVector2f.create();
+
+ final ovrRecti ovrEyeRenderViewport = OVRUtil.createOVRRecti(eyeViewport);
+ OVR.ovrHmd_GetRenderScaleAndOffset(fov, ovrTextureSize, ovrEyeRenderViewport, uvScaleOffsetOut);
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX."+eyeName+": fov "+OVRUtil.toString(fov));
+ System.err.println("XXX."+eyeName+": uvScale "+OVRUtil.toString(uvScaleOffsetOut[0]));
+ System.err.println("XXX."+eyeName+": uvOffset "+OVRUtil.toString(uvScaleOffsetOut[1]));
+ System.err.println("XXX."+eyeName+": textureSize "+OVRUtil.toString(ovrTextureSize));
+ System.err.println("XXX."+eyeName+": viewport "+OVRUtil.toString(ovrEyeRenderViewport));
+ }
+ final FloatBuffer eyeToSourceUVScaleFB = eyeToSourceUVScale.floatBufferValue();
+ eyeToSourceUVScaleFB.put(0, uvScaleOffsetOut[0].getX());
+ eyeToSourceUVScaleFB.put(1, uvScaleOffsetOut[0].getY());
+ final FloatBuffer eyeToSourceUVOffsetFB = eyeToSourceUVOffset.floatBufferValue();
+ eyeToSourceUVOffsetFB.put(0, uvScaleOffsetOut[1].getX());
+ eyeToSourceUVOffsetFB.put(1, uvScaleOffsetOut[1].getY());
+ }
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ final FloatBuffer iVBOFB = (FloatBuffer)iVBO.getBuffer();
+ final ovrDistortionVertex[] ovRes = new ovrDistortionVertex[1];
+ ovRes[0] = ovrDistortionVertex.create(); // FIXME: remove ctor / double check
+
+ for ( int vertNum = 0; vertNum < vertexCount; vertNum++ ) {
+ final ovrDistortionVertex ov = meshData.getPVertexData(vertNum, ovRes)[0];
+ ovrVector2f v;
+
+ // pos
+ v = ov.getPos();
+ iVBOFB.put(v.getX());
+ iVBOFB.put(v.getY());
+
+ // params
+ if( useVignette ) {
+ iVBOFB.put(ov.getVignetteFactor());
+ } else {
+ iVBOFB.put(1.0f);
+ }
+ iVBOFB.put(ov.getTimeWarpFactor());
+
+ // texCoordR
+ v = ov.getTexR();
+ iVBOFB.put(v.getX());
+ iVBOFB.put(v.getY());
+
+ if( useChromatic ) {
+ // texCoordG
+ v = ov.getTexG();
+ iVBOFB.put(v.getX());
+ iVBOFB.put(v.getY());
+
+ // texCoordB
+ v = ov.getTexB();
+ iVBOFB.put(v.getX());
+ iVBOFB.put(v.getY());
+ }
+ }
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX."+eyeName+": iVBO "+iVBO);
+ }
+ {
+ final ShortBuffer in = meshData.getPIndexData();
+ final ShortBuffer out = (ShortBuffer) indices.getBuffer();
+ out.put(in);
+ }
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX."+eyeName+": idx "+indices);
+ System.err.println("XXX."+eyeName+": "+this);
+ }
+ OVR.ovrHmd_DestroyDistortionMesh(meshData);
+ }
+
+ private void linkData(final GL2ES2 gl, final ShaderProgram sp) {
+ if( 0 > vboPos.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboPos);
+ }
+ if( 0 > vboParams.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboParams);
+ }
+ if( 0 > vboTexCoordsR.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboTexCoordsR);
+ }
+ if( StereoUtil.usesChromaticDistortion(distortionBits) ) {
+ if( 0 > vboTexCoordsG.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboTexCoordsG);
+ }
+ if( 0 > vboTexCoordsB.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+vboTexCoordsB);
+ }
+ }
+ if( 0 > eyeToSourceUVScale.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+eyeToSourceUVScale);
+ }
+ if( 0 > eyeToSourceUVOffset.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+eyeToSourceUVOffset);
+ }
+ if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+ if( 0 > eyeRotationStart.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+eyeRotationStart);
+ }
+ if( 0 > eyeRotationEnd.setLocation(gl, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+eyeRotationEnd);
+ }
+ }
+ iVBO.seal(gl, true);
+ iVBO.enableBuffer(gl, false);
+ indices.seal(gl, true);
+ indices.enableBuffer(gl, false);
+ }
+
+ private void dispose(final GL2ES2 gl) {
+ iVBO.destroy(gl);
+ indices.destroy(gl);
+ }
+ private void enableVBO(final GL2ES2 gl, final boolean enable) {
+ iVBO.enableBuffer(gl, enable);
+ indices.bindBuffer(gl, enable); // keeps VBO binding if enable:=true
+ }
+
+ private void updateUniform(final GL2ES2 gl, final ShaderProgram sp) {
+ gl.glUniform(eyeToSourceUVScale);
+ gl.glUniform(eyeToSourceUVOffset);
+ if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+ gl.glUniform(eyeRotationStart);
+ gl.glUniform(eyeRotationEnd);
+ }
+ }
+
+ private void updateTimewarp(final OvrHmdContext hmdCtx, final ovrPosef eyeRenderPose, final float[] mat4Tmp1, final float[] mat4Tmp2) {
+ final ovrMatrix4f[] timeWarpMatrices = new ovrMatrix4f[2];
+ timeWarpMatrices[0] = ovrMatrix4f.create(); // FIXME: remove ctor / double check
+ timeWarpMatrices[1] = ovrMatrix4f.create();
+ OVR.ovrHmd_GetEyeTimewarpMatrices(hmdCtx, eyeName, eyeRenderPose, timeWarpMatrices);
+
+ final float[] eyeRotationStartM = FloatUtil.transposeMatrix(timeWarpMatrices[0].getM(0, mat4Tmp1), mat4Tmp2);
+ final FloatBuffer eyeRotationStartU = eyeRotationStart.floatBufferValue();
+ eyeRotationStartU.put(eyeRotationStartM);
+ eyeRotationStartU.rewind();
+
+ final float[] eyeRotationEndM = FloatUtil.transposeMatrix(timeWarpMatrices[1].getM(0, mat4Tmp1), mat4Tmp2);
+ final FloatBuffer eyeRotationEndU = eyeRotationEnd.floatBufferValue();
+ eyeRotationEndU.put(eyeRotationEndM);
+ eyeRotationEndU.rewind();
+ }
+
+ /**
+ * Updates {@link #ovrEyePose} and its extracted
+ * {@link #eyePose} position and orientation.
+ * @param hmdCtx used to get the {@link #ovrEyePose} via {@link OVR#ovrHmd_GetEyePose(OvrHmdContext, int)}
+ */
+ private EyePose updateEyePose(final OvrHmdContext hmdCtx) {
+ ovrEyePose = OVR.ovrHmd_GetEyePose(hmdCtx, eyeName);
+ final ovrVector3f pos = ovrEyePose.getPosition();
+ eyePose.setPosition(pos.getX(), pos.getY(), pos.getZ());
+ OVRUtil.copyToQuaternion(ovrEyePose.getOrientation(), eyePose.orientation);
+ return eyePose;
+ }
+
+ @Override
+ public String toString() {
+ return "Eye["+eyeName+", viewport "+viewport+
+ ", "+eyeParameter+
+ ", vertices "+vertexCount+", indices "+indexCount+
+ ", uvScale["+eyeToSourceUVScale.floatBufferValue().get(0)+", "+eyeToSourceUVScale.floatBufferValue().get(1)+
+ "], uvOffset["+eyeToSourceUVOffset.floatBufferValue().get(0)+", "+eyeToSourceUVOffset.floatBufferValue().get(1)+
+ "], desc"+OVRUtil.toString(ovrEyeDesc)+", "+eyePose+"]";
+ }
+ }
+
+ private final OVRStereoDevice context;
+ private final OVREye[] eyes;
+ private final int distortionBits;
+ private final int textureCount;
+ private final DimensionImmutable singleTextureSize;
+ private final DimensionImmutable totalTextureSize;
+ private final GLUniformData texUnit0;
+
+
+ private final float[] mat4Tmp1 = new float[16];
+ private final float[] mat4Tmp2 = new float[16];
+
+ private ShaderProgram sp;
+ private ovrFrameTiming frameTiming;
+
+ @Override
+ public String toString() {
+ return "OVRDist[distortion["+StereoUtil.distortionBitsToString(distortionBits)+
+ "], singleSize "+singleTextureSize+
+ ", sbsSize "+totalTextureSize+
+ ", texCount "+textureCount+", texUnit "+getTextureUnit()+
+ ", "+PlatformPropsImpl.NEWLINE+" "+eyes[0]+", "+PlatformPropsImpl.NEWLINE+" "+eyes[1]+"]";
+ }
+
+
+ private static int distBits2OVRDistCaps(final int distortionBits) {
+ int bits = 0;
+ if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+ bits |= OVR.ovrDistortionCap_TimeWarp;
+ }
+ if( StereoUtil.usesChromaticDistortion(distortionBits) ) {
+ bits |= OVR.ovrDistortionCap_Chromatic;
+ }
+ if( StereoUtil.usesVignetteDistortion(distortionBits) ) {
+ bits |= OVR.ovrDistortionCap_Vignette;
+ }
+ return bits;
+ }
+
+ /* pp */ OVRStereoDeviceRenderer(final OVRStereoDevice context, final int distortionBits,
+ final int textureCount, final float[] eyePositionOffset,
+ final ovrEyeRenderDesc[] eyeRenderDescs, final DimensionImmutable singleTextureSize, final DimensionImmutable totalTextureSize,
+ final RectangleImmutable[] eyeViewports, final int textureUnit) {
+ if( 1 > textureCount || 2 < textureCount ) {
+ throw new IllegalArgumentException("textureCount can only be 1 or 2, has "+textureCount);
+ }
+ this.context = context;
+ this.eyes = new OVREye[2];
+ this.distortionBits = distortionBits | StereoDeviceRenderer.DISTORTION_BARREL /* always */;
+ this.textureCount = textureCount;
+ this.singleTextureSize = singleTextureSize;
+ this.totalTextureSize = totalTextureSize;
+
+ texUnit0 = new GLUniformData("ovr_Texture0", textureUnit);
+
+ final ovrSizei ovrTextureSize = OVRUtil.createOVRSizei( 1 == textureCount ? totalTextureSize : singleTextureSize );
+ eyes[0] = new OVREye(context.handle, this.distortionBits, eyePositionOffset, eyeRenderDescs[0], ovrTextureSize, eyeViewports[0]);
+ eyes[1] = new OVREye(context.handle, this.distortionBits, eyePositionOffset, eyeRenderDescs[1], ovrTextureSize, eyeViewports[1]);
+ sp = null;
+ frameTiming = null;
+ }
+
+ @Override
+ public StereoDevice getDevice() {
+ return context;
+ }
+
+ @Override
+ public final int getDistortionBits() { return distortionBits; }
+
+ @Override
+ public final boolean usesSideBySideStereo() { return true; }
+
+ @Override
+ public final DimensionImmutable getSingleSurfaceSize() { return singleTextureSize; }
+
+ @Override
+ public final DimensionImmutable getTotalSurfaceSize() { return totalTextureSize; }
+
+ @Override
+ public final int getTextureCount() { return textureCount; }
+
+ @Override
+ public final int getTextureUnit() { return texUnit0.intValue(); }
+
+ @Override
+ public final boolean ppRequired() { return true; }
+
+ @Override
+ public final void init(final GL gl) {
+ if( StereoDevice.DEBUG ) {
+ System.err.println(JoglVersion.getGLInfo(gl, null).toString());
+ }
+ if( null != sp ) {
+ throw new IllegalStateException("Already initialized");
+ }
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+
+ final String vertexShaderBasename;
+ final String fragmentShaderBasename;
+ {
+ final boolean usesTimewarp = StereoUtil.usesTimewarpDistortion(distortionBits);
+ final boolean usesChromatic = StereoUtil.usesChromaticDistortion(distortionBits);
+
+ final StringBuilder sb = new StringBuilder();
+ sb.append(shaderPrefix01);
+ if( !usesChromatic && !usesTimewarp ) {
+ sb.append(shaderPlainSuffix);
+ } else if( usesChromatic && !usesTimewarp ) {
+ sb.append(shaderChromaSuffix);
+ } else if( usesTimewarp ) {
+ sb.append(shaderTimewarpSuffix);
+ if( usesChromatic ) {
+ sb.append(shaderChromaSuffix);
+ }
+ }
+ vertexShaderBasename = sb.toString();
+ sb.setLength(0);
+ sb.append(shaderPrefix01);
+ if( usesChromatic ) {
+ sb.append(shaderChromaSuffix);
+ } else {
+ sb.append(shaderPlainSuffix);
+ }
+ fragmentShaderBasename = sb.toString();
+ }
+ final ShaderCode vp0 = ShaderCode.create(gl2es2, GL2ES2.GL_VERTEX_SHADER, OVRStereoDeviceRenderer.class, "shader",
+ "shader/bin", vertexShaderBasename, true);
+ final ShaderCode fp0 = ShaderCode.create(gl2es2, GL2ES2.GL_FRAGMENT_SHADER, OVRStereoDeviceRenderer.class, "shader",
+ "shader/bin", fragmentShaderBasename, true);
+ vp0.defaultShaderCustomization(gl2es2, true, true);
+ fp0.defaultShaderCustomization(gl2es2, true, true);
+
+ sp = new ShaderProgram();
+ sp.add(gl2es2, vp0, System.err);
+ sp.add(gl2es2, fp0, System.err);
+ if(!sp.link(gl2es2, System.err)) {
+ throw new GLException("could not link program: "+sp);
+ }
+ sp.useProgram(gl2es2, true);
+ if( 0 > texUnit0.setLocation(gl2es2, sp.program()) ) {
+ throw new OVRException("Couldn't locate "+texUnit0);
+ }
+ eyes[0].linkData(gl2es2, sp);
+ eyes[1].linkData(gl2es2, sp);
+ sp.useProgram(gl2es2, false);
+ }
+
+ @Override
+ public final void dispose(final GL gl) {
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+ sp.useProgram(gl2es2, false);
+ eyes[0].dispose(gl2es2);
+ eyes[1].dispose(gl2es2);
+ sp.destroy(gl2es2);
+ frameTiming = null;
+ }
+
+ @Override
+ public final Eye getEye(final int eyeNum) {
+ return eyes[eyeNum];
+ }
+
+ @Override
+ public final EyePose updateEyePose(final int eyeNum) {
+ return eyes[eyeNum].updateEyePose(context.handle);
+ }
+
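+    /** Starts OVR frame timing; the result is consumed by {@link #ppBegin(GL)} (timewarp) and closed in {@link #endFrame(GL)}. */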
+ @Override
+ public final void beginFrame(final GL gl) {
+ frameTiming = OVR.ovrHmd_BeginFrameTiming(context.handle, 0);
+ }
+
+ @Override
+ public final void endFrame(final GL gl) {
+ if( null == frameTiming ) {
+ throw new IllegalStateException("beginFrame not called");
+ }
+ OVR.ovrHmd_EndFrameTiming(context.handle);
+ frameTiming = null;
+ }
+
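+    /**
+     * Sets up GL state for the distortion post-processing pass: optionally waits for the
+     * timewarp point, clears the color buffer, selects the eye texture unit, disables
+     * cull/depth/blend and enables the distortion shader program.
+     */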
+ @Override
+ public final void ppBegin(final GL gl) {
+ if( null == sp ) {
+ throw new IllegalStateException("Not initialized");
+ }
+ if( null == frameTiming ) {
+ throw new IllegalStateException("beginFrame not called");
+ }
+ if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+ OVR.ovr_WaitTillTime(frameTiming.getTimewarpPointSeconds());
+ }
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+
+ gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+ gl.glClear(GL.GL_COLOR_BUFFER_BIT);
+ gl.glActiveTexture(GL.GL_TEXTURE0 + getTextureUnit());
+
+ gl2es2.glDisable(GL.GL_CULL_FACE);
+ gl2es2.glDisable(GL.GL_DEPTH_TEST);
+ gl2es2.glDisable(GL.GL_BLEND);
+
+ if( !gl2es2.isGLcore() ) {
+ gl2es2.glEnable(GL.GL_TEXTURE_2D);
+ }
+
+ sp.useProgram(gl2es2, true);
+
+ gl2es2.glUniform(texUnit0);
+ }
+
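+    /** Renders the distortion mesh for both eyes, updating timewarp matrices (if enabled) and per-eye uniforms before each draw. */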
+ @Override
+ public final void ppBothEyes(final GL gl) {
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+ for(int eyeNum=0; eyeNum<2; eyeNum++) {
+ final OVREye eye = eyes[eyeNum];
+ if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+ eye.updateTimewarp(context.handle, eye.ovrEyePose, mat4Tmp1, mat4Tmp2);
+ }
+ eye.updateUniform(gl2es2, sp);
+ eye.enableVBO(gl2es2, true);
+ gl2es2.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0);
+ eyes[eyeNum].enableVBO(gl2es2, false);
+ }
+ }
+
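+    /** Same as {@link #ppBothEyes(GL)}, but renders the distortion mesh for a single eye only. */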
+ @Override
+ public final void ppOneEye(final GL gl, final int eyeNum) {
+ final OVREye eye = eyes[eyeNum];
+ if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+ eye.updateTimewarp(context.handle, eye.ovrEyePose, mat4Tmp1, mat4Tmp2);
+ }
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+
+ eye.updateUniform(gl2es2, sp);
+ eye.enableVBO(gl2es2, true);
+ gl2es2.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0);
+ eyes[eyeNum].enableVBO(gl2es2, false);
+ }
+
+ @Override
+ public final void ppEnd(final GL gl) {
+ sp.useProgram(gl.getGL2ES2(), false);
+ }
+}
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java
index 6c1cdc015..4de05fc92 100644
--- a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java
@@ -27,7 +27,11 @@
*/
package jogamp.opengl.oculusvr;
-import jogamp.opengl.Debug;
+import javax.media.nativewindow.util.Dimension;
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.Point;
+import javax.media.nativewindow.util.PointImmutable;
+import javax.media.nativewindow.util.RectangleImmutable;
import com.jogamp.oculusvr.ovrEyeRenderDesc;
import com.jogamp.oculusvr.ovrFovPort;
@@ -44,8 +48,6 @@ import com.jogamp.opengl.math.Quaternion;
* OculusVR Data Conversion Helper Functions
*/
public class OVRUtil {
- public static final boolean DEBUG = Debug.debug("OVR");
-
public static ovrRecti createOVRRecti(final int[] rect) {
final ovrRecti res = ovrRecti.create();
final ovrVector2i pos = res.getPos();
@@ -56,6 +58,16 @@ public class OVRUtil {
size.setH(rect[3]);
return res;
}
+ public static ovrRecti createOVRRecti(final RectangleImmutable rect) {
+ final ovrRecti res = ovrRecti.create();
+ final ovrVector2i pos = res.getPos();
+ final ovrSizei size = res.getSize();
+ pos.setX(rect.getX());
+ pos.setY(rect.getY());
+ size.setW(rect.getWidth());
+ size.setH(rect.getHeight());
+ return res;
+ }
public static ovrRecti[] createOVRRectis(final int[][] rects) {
final ovrRecti[] res = new ovrRecti[rects.length];
for(int i=0; i<res.length; i++) {
@@ -69,11 +81,24 @@ public class OVRUtil {
res.setH(size[1]);
return res;
}
- public static Quaternion getQuaternion(final ovrQuatf q) {
- return new Quaternion(q.getX(), q.getY(), q.getZ(), q.getW());
+ public static ovrSizei createOVRSizei(final DimensionImmutable size) {
+ final ovrSizei res = ovrSizei.create();
+ res.setW(size.getWidth());
+ res.setH(size.getHeight());
+ return res;
}
- public static void copyToQuaternion(final ovrQuatf in, final Quaternion out) {
- out.set(in.getX(), in.getY(), in.getZ(), in.getW());
+ public static DimensionImmutable getOVRSizei(final ovrSizei v) {
+ return new Dimension(v.getW(), v.getH());
+ }
+ public static PointImmutable getVec2iAsPoint(final ovrVector2i v) {
+ return new Point(v.getX(), v.getY());
+ }
+ public static int[] getVec2i(final ovrVector2i v) {
+ return new int[] { v.getX(), v.getY() };
+ }
+ public static void copyVec2iToInt(final ovrVector2i v, final int[] res) {
+ res[0] = v.getX();
+ res[1] = v.getY();
}
public static float[] getVec3f(final ovrVector3f v) {
return new float[] { v.getX(), v.getY(), v.getZ() };
@@ -83,12 +108,33 @@ public class OVRUtil {
res[1] = v.getY();
res[2] = v.getZ();
}
+ public static Quaternion getQuaternion(final ovrQuatf q) {
+ return new Quaternion(q.getX(), q.getY(), q.getZ(), q.getW());
+ }
+ public static void copyToQuaternion(final ovrQuatf in, final Quaternion out) {
+ out.set(in.getX(), in.getY(), in.getZ(), in.getW());
+ }
public static FovHVHalves getFovHV(final ovrFovPort tanHalfFov) {
return new FovHVHalves(tanHalfFov.getLeftTan(), tanHalfFov.getRightTan(),
tanHalfFov.getUpTan(), tanHalfFov.getDownTan(),
true);
}
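+    /**
+     * Converts vendor-agnostic {@link FovHVHalves} to an OVR {@code ovrFovPort};
+     * values are passed through if already in tangents, otherwise converted from angles via {@code Math.tan(..)}.
+     */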
+ public static ovrFovPort getOVRFovPort(final FovHVHalves fovHVHalves) {
+ final ovrFovPort tanHalfFov = ovrFovPort.create();
+ if( fovHVHalves.inTangents ) {
+ tanHalfFov.setLeftTan(fovHVHalves.left);
+ tanHalfFov.setRightTan(fovHVHalves.right);
+ tanHalfFov.setUpTan(fovHVHalves.top);
+ tanHalfFov.setDownTan(fovHVHalves.bottom);
+ } else {
+ tanHalfFov.setLeftTan((float)Math.tan(fovHVHalves.left));
+ tanHalfFov.setRightTan((float)Math.tan(fovHVHalves.right));
+ tanHalfFov.setUpTan((float)Math.tan(fovHVHalves.top));
+ tanHalfFov.setDownTan((float)Math.tan(fovHVHalves.bottom));
+ }
+ return tanHalfFov;
+ }
public static String toString(final ovrFovPort fov) {
return "["+fov.getLeftTan()+" l, "+fov.getRightTan()+" r, "+
diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/GearsES2.java b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/GearsES2.java
index 2709aa608..84cd8936e 100644
--- a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/GearsES2.java
+++ b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/GearsES2.java
@@ -35,7 +35,7 @@ import com.jogamp.opengl.math.FloatUtil;
import com.jogamp.opengl.math.Quaternion;
import com.jogamp.opengl.math.VectorUtil;
import com.jogamp.opengl.test.junit.jogl.demos.GearsObject;
-import com.jogamp.opengl.util.CustomRendererListener;
+import com.jogamp.opengl.util.CustomGLEventListener;
import com.jogamp.opengl.util.PMVMatrix;
import com.jogamp.opengl.util.TileRendererBase;
import com.jogamp.opengl.util.glsl.ShaderCode;
@@ -43,7 +43,7 @@ import com.jogamp.opengl.util.glsl.ShaderProgram;
import com.jogamp.opengl.util.glsl.ShaderState;
import com.jogamp.opengl.util.stereo.EyeParameter;
import com.jogamp.opengl.util.stereo.EyePose;
-import com.jogamp.opengl.util.stereo.StereoRendererListener;
+import com.jogamp.opengl.util.stereo.StereoGLEventListener;
import java.nio.FloatBuffer;
@@ -60,7 +60,7 @@ import javax.media.opengl.fixedfunc.GLMatrixFunc;
* GearsES2.java <BR>
* @author Brian Paul (converted to Java by Ron Cemer and Sven Gothel) <P>
*/
-public class GearsES2 implements StereoRendererListener, TileRendererBase.TileRendererListener {
+public class GearsES2 implements StereoGLEventListener, TileRendererBase.TileRendererListener {
private final FloatBuffer lightPos = Buffers.newDirectFloatBuffer( new float[] { 5.0f, 5.0f, 10.0f } );
private ShaderState st = null;
@@ -419,8 +419,8 @@ public class GearsES2 implements StereoRendererListener, TileRendererBase.TileRe
private final float[] vec3Tmp3 = new float[3];
@Override
- public void reshapeEye(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height,
- final EyeParameter eyeParam, final EyePose eyePose) {
+ public void reshapeForEye(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height,
+ final EyeParameter eyeParam, final EyePose eyePose) {
final GL2ES2 gl = drawable.getGL().getGL2ES2();
pmvMatrix.glMatrixMode(GLMatrixFunc.GL_PROJECTION);
final float[] mat4Projection = FloatUtil.makePerspective(mat4Tmp1, 0, true, eyeParam.fovhv, zNear, zFar);
@@ -507,8 +507,8 @@ public class GearsES2 implements StereoRendererListener, TileRendererBase.TileRe
System.err.println(Thread.currentThread()+" GearsES2.display "+sid()+" "+drawable.getSurfaceWidth()+"x"+drawable.getSurfaceHeight()+", swapInterval "+swapInterval+", drawable 0x"+Long.toHexString(drawable.getHandle()));
}
- final boolean repeatedFrame = 0 != ( CustomRendererListener.DISPLAY_REPEAT & flags );
- final boolean dontClear = 0 != ( CustomRendererListener.DISPLAY_DONTCLEAR & flags );
+ final boolean repeatedFrame = 0 != ( CustomGLEventListener.DISPLAY_REPEAT & flags );
+ final boolean dontClear = 0 != ( CustomGLEventListener.DISPLAY_DONTCLEAR & flags );
// Turn the gears' teeth
if( doRotate && !repeatedFrame ) {
diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSBSStereo.java b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSBSStereo.java
new file mode 100644
index 000000000..a2a39d631
--- /dev/null
+++ b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSBSStereo.java
@@ -0,0 +1,859 @@
+/**
+ * Copyright 2012 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+
+package com.jogamp.opengl.test.junit.jogl.demos.es2.av;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.FloatBuffer;
+
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES2;
+import javax.media.opengl.GLAnimatorControl;
+import javax.media.opengl.GLAutoDrawable;
+import javax.media.opengl.GLES2;
+import javax.media.opengl.GLException;
+import javax.media.opengl.GLUniformData;
+import javax.media.opengl.fixedfunc.GLMatrixFunc;
+
+import com.jogamp.common.os.Platform;
+import com.jogamp.graph.curve.Region;
+import com.jogamp.graph.curve.opengl.GLRegion;
+import com.jogamp.graph.curve.opengl.RegionRenderer;
+import com.jogamp.graph.font.Font;
+import com.jogamp.newt.Window;
+import com.jogamp.newt.event.KeyAdapter;
+import com.jogamp.newt.event.KeyEvent;
+import com.jogamp.newt.event.KeyListener;
+import com.jogamp.newt.event.MouseAdapter;
+import com.jogamp.newt.event.MouseEvent;
+import com.jogamp.newt.event.MouseListener;
+import com.jogamp.newt.opengl.GLWindow;
+import com.jogamp.opengl.GLExtensions;
+import com.jogamp.opengl.JoglVersion;
+import com.jogamp.opengl.math.FloatUtil;
+import com.jogamp.opengl.math.Quaternion;
+import com.jogamp.opengl.math.VectorUtil;
+import com.jogamp.opengl.test.junit.graph.TextRendererGLELBase;
+import com.jogamp.opengl.test.junit.util.UITestCase;
+import com.jogamp.opengl.util.CustomGLEventListener;
+import com.jogamp.opengl.util.GLArrayDataServer;
+import com.jogamp.opengl.util.PMVMatrix;
+import com.jogamp.opengl.util.av.GLMediaPlayer;
+import com.jogamp.opengl.util.av.GLMediaPlayer.GLMediaEventListener;
+import com.jogamp.opengl.util.av.GLMediaPlayerFactory;
+import com.jogamp.opengl.util.glsl.ShaderCode;
+import com.jogamp.opengl.util.glsl.ShaderProgram;
+import com.jogamp.opengl.util.glsl.ShaderState;
+import com.jogamp.opengl.util.stereo.EyeParameter;
+import com.jogamp.opengl.util.stereo.EyePose;
+import com.jogamp.opengl.util.stereo.StereoClientRenderer;
+import com.jogamp.opengl.util.stereo.StereoGLEventListener;
+import com.jogamp.opengl.util.texture.Texture;
+import com.jogamp.opengl.util.texture.TextureCoords;
+import com.jogamp.opengl.util.texture.TextureSequence;
+import com.jogamp.opengl.util.texture.TextureSequence.TextureFrame;
+
+/**
+ * Side-By-Side (SBS) 3D Movie Player for {@link StereoClientRenderer}
+ * <p>
+ * The movie is assumed to be symmetrical SBS:
+ * the left eye receives the left half of the texture
+ * and the right eye the right half.

+ * </p>
+ */
+public class MovieSBSStereo implements StereoGLEventListener {
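+    // Typical wiring, as performed by StereoDemo01 (illustrative sketch; 'renderer' denotes the
+    // StereoClientRenderer, 'window' the GLWindow and 'movieURI' the stream location):
+    //
+    //   final MovieSBSStereo movie = new MovieSBSStereo();
+    //   final GLMediaPlayer mp = movie.getGLMediaPlayer();
+    //   mp.attachObject(MovieSimple.WINDOW_KEY, window);
+    //   mp.attachObject(MovieSBSStereo.STEREO_RENDERER_KEY, renderer);
+    //   mp.addEventListener(MovieSBSStereo.stereoGLMediaEventListener);
+    //   movie.initStream(movieURI, GLMediaPlayer.STREAM_ID_AUTO, GLMediaPlayer.STREAM_ID_AUTO, 3);
+    //
+    // The event listener then adds this MovieSBSStereo to the StereoClientRenderer once the stream is initialized.
+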
+ public static final String WINDOW_KEY = "window";
+ public static final String STEREO_RENDERER_KEY = "stereo";
+ public static final String PLAYER = "player";
+
+ private static boolean waitForKey = false;
+ private int surfWidth, surfHeight;
+ private int prevMouseX; // , prevMouseY;
+ private int rotate = 0;
+ private float zoom0;
+ private float zoom1;
+ private float zoom;
+ private long startTime;
+ private final float alpha = 1.0f;
+
+ private GLMediaPlayer mPlayer;
+ private boolean mPlayerScaleOrig;
+ private float[] verts = null;
+ private GLArrayDataServer interleavedVBOLeft;
+ private GLArrayDataServer interleavedVBORight;
+ private volatile boolean resetGLState = false;
+ private StereoClientRenderer stereoClientRenderer;
+
+ private ShaderState st;
+ private PMVMatrix pmvMatrix;
+ private GLUniformData pmvMatrixUniform;
+ private static final String shaderBasename = "texsequence_xxx";
+ private static final String myTextureLookupName = "myTexture2D";
+
+ /** Blender's Big Buck Bunny: 24f 416p H.264, AAC 48000 Hz, 2 ch, mpeg stream. */
+ public static final URI defURI;
+ static {
+ URI _defURI = null;
+ try {
+ // Blender's Big Buck Bunny Trailer: 24f 640p VP8, Vorbis 44100Hz mono, WebM/Matroska Stream.
+ // _defURI = new URI("http://video.webmfiles.org/big-buck-bunny_trailer.webm");
+ _defURI = new URI("http://archive.org/download/BigBuckBunny_328/BigBuckBunny_512kb.mp4");
+ } catch (final URISyntaxException e) {
+ e.printStackTrace();
+ }
+ defURI = _defURI;
+ }
+
+ final int[] textSampleCount = { 4 };
+
+ private final class InfoTextRendererGLELBase extends TextRendererGLELBase {
+ private final Font font = getFont(0, 0, 0);
+ private final float fontSize = 1f; // 0.01f;
+ private final GLRegion regionFPS;
+
+ InfoTextRendererGLELBase(final int rmode, final boolean lowPerfDevice) {
+ // FIXME: Graph TextRenderer does not AA well w/o MSAA and FBO
+ super(rmode, textSampleCount);
+ this.setRendererCallbacks(RegionRenderer.defaultBlendEnable, RegionRenderer.defaultBlendDisable);
+ if( lowPerfDevice ) {
+ regionFPS = null;
+ } else {
+ regionFPS = GLRegion.create(renderModes, null);
+ System.err.println("RegionFPS "+Region.getRenderModeString(renderModes)+", sampleCount "+textSampleCount[0]+", class "+regionFPS.getClass().getName());
+ }
+ staticRGBAColor[0] = 0.9f;
+ staticRGBAColor[1] = 0.9f;
+ staticRGBAColor[2] = 0.9f;
+ staticRGBAColor[3] = 1.0f;
+ }
+
+ @Override
+ public void init(final GLAutoDrawable drawable) {
+ super.init(drawable);
+ }
+
+ @Override
+ public void dispose(final GLAutoDrawable drawable) {
+ if( null != regionFPS ) {
+ regionFPS.destroy(drawable.getGL().getGL2ES2());
+ }
+ super.dispose(drawable);
+ }
+
+ @Override
+ public void display(final GLAutoDrawable drawable) {
+ final GLAnimatorControl anim = drawable.getAnimator();
+ final float lfps = null != anim ? anim.getLastFPS() : 0f;
+ final float tfps = null != anim ? anim.getTotalFPS() : 0f;
+ final boolean hasVideo = GLMediaPlayer.STREAM_ID_NONE != mPlayer.getVID();
+ final float pts = ( hasVideo ? mPlayer.getVideoPTS() : mPlayer.getAudioPTS() ) / 1000f;
+
+ // Note: MODELVIEW is from [ 0 .. height ]
+
+ final int height = 0; // drawable.getSurfaceHeight();
+
+ final float aspect = (float)mPlayer.getWidth() / (float)mPlayer.getHeight();
+
+ final String ptsPrec = null != regionFPS ? "3.1" : "3.0";
+ final String text1 = String.format("%0"+ptsPrec+"f/%0"+ptsPrec+"f s, %s (%01.2fx, vol %01.2f), a %01.2f, fps %02.1f -> %02.1f / %02.1f",
+ pts, mPlayer.getDuration() / 1000f,
+ mPlayer.getState().toString().toLowerCase(), mPlayer.getPlaySpeed(), mPlayer.getAudioVolume(),
+ aspect, mPlayer.getFramerate(), lfps, tfps);
+ final String text2 = String.format("audio: id %d, kbps %d, codec %s",
+ mPlayer.getAID(), mPlayer.getAudioBitrate()/1000, mPlayer.getAudioCodec());
+ final String text3 = String.format("video: id %d, kbps %d, codec %s",
+ mPlayer.getVID(), mPlayer.getVideoBitrate()/1000, mPlayer.getVideoCodec());
+ final String text4 = mPlayer.getURI().getRawPath();
+ if( displayOSD && null != renderer ) {
+ // We share ClearColor w/ MovieSimple's init !
+ final float pixelSize = font.getPixelSize(fontSize, dpiH);
+ if( null != regionFPS ) {
+ renderString(drawable, font, pixelSize, text1, 1 /* col */, 1 /* row */, 0, 0, -1, regionFPS); // no-cache
+ } else {
+ renderString(drawable, font, pixelSize, text1, 1 /* col */, 1 /* row */, 0, 0, -1, true);
+ }
+ renderString(drawable, font, pixelSize, text2, 1 /* col */, -4 /* row */, 0, height, -1, true);
+ renderString(drawable, font, pixelSize, text3, 1 /* col */, -3 /* row */, 0, height, 0, true);
+ renderString(drawable, font, pixelSize, text4, 1 /* col */, -2 /* row */, 0, height, 1, true);
+ }
+ } };
+ private InfoTextRendererGLELBase textRendererGLEL = null;
+ private boolean displayOSD = false;
+
+ private final MouseListener mouseAction = new MouseAdapter() {
+ public void mousePressed(final MouseEvent e) {
+ if(e.getY()<=surfHeight/2 && null!=mPlayer && 1 == e.getClickCount()) {
+ if(GLMediaPlayer.State.Playing == mPlayer.getState()) {
+ mPlayer.pause(false);
+ } else {
+ mPlayer.play();
+ }
+ }
+ }
+ public void mouseReleased(final MouseEvent e) {
+ if(e.getY()<=surfHeight/2) {
+ rotate = -1;
+ zoom = zoom0;
+ System.err.println("zoom: "+zoom);
+ }
+ }
+ public void mouseMoved(final MouseEvent e) {
+ prevMouseX = e.getX();
+ // prevMouseY = e.getY();
+ }
+ public void mouseDragged(final MouseEvent e) {
+ final int x = e.getX();
+ final int y = e.getY();
+
+ if(y>surfHeight/2) {
+ final float dp = (float)(x-prevMouseX)/(float)surfWidth;
+ final int pts0 = GLMediaPlayer.STREAM_ID_NONE != mPlayer.getVID() ? mPlayer.getVideoPTS() : mPlayer.getAudioPTS();
+ mPlayer.seek(pts0 + (int) (mPlayer.getDuration() * dp));
+ } else {
+ mPlayer.play();
+ rotate = 1;
+ zoom = zoom1;
+ }
+
+ prevMouseX = x;
+ // prevMouseY = y;
+ }
+ public void mouseWheelMoved(final MouseEvent e) {
+ if( !e.isShiftDown() ) {
+ zoom += e.getRotation()[1]/10f; // vertical: wheel
+ System.err.println("zoom: "+zoom);
+ }
+ } };
+
+ private final KeyListener keyAction = new KeyAdapter() {
+ public void keyReleased(final KeyEvent e) {
+ if( e.isAutoRepeat() ) {
+ return;
+ }
+ System.err.println("MC "+e);
+ final int pts0 = GLMediaPlayer.STREAM_ID_NONE != mPlayer.getVID() ? mPlayer.getVideoPTS() : mPlayer.getAudioPTS();
+ int pts1 = 0;
+ switch(e.getKeySymbol()) {
+ case KeyEvent.VK_O: displayOSD = !displayOSD; break;
+ case KeyEvent.VK_RIGHT: pts1 = pts0 + 1000; break;
+ case KeyEvent.VK_UP: pts1 = pts0 + 10000; break;
+ case KeyEvent.VK_PAGE_UP: pts1 = pts0 + 30000; break;
+ case KeyEvent.VK_LEFT: pts1 = pts0 - 1000; break;
+ case KeyEvent.VK_DOWN: pts1 = pts0 - 10000; break;
+ case KeyEvent.VK_PAGE_DOWN: pts1 = pts0 - 30000; break;
+ case KeyEvent.VK_ESCAPE:
+ case KeyEvent.VK_HOME:
+ case KeyEvent.VK_BACK_SPACE: {
+ mPlayer.seek(0);
+ break;
+ }
+ case KeyEvent.VK_SPACE: {
+ if(GLMediaPlayer.State.Paused == mPlayer.getState()) {
+ mPlayer.play();
+ } else {
+ mPlayer.pause(false);
+ }
+ break;
+ }
+ case KeyEvent.VK_MULTIPLY:
+ mPlayer.setPlaySpeed(1.0f);
+ break;
+ case KeyEvent.VK_SUBTRACT: {
+ float playSpeed = mPlayer.getPlaySpeed();
+ if( e.isShiftDown() ) {
+ playSpeed /= 2.0f;
+ } else {
+ playSpeed -= 0.1f;
+ }
+ mPlayer.setPlaySpeed(playSpeed);
+ } break;
+ case KeyEvent.VK_ADD: {
+ float playSpeed = mPlayer.getPlaySpeed();
+ if( e.isShiftDown() ) {
+ playSpeed *= 2.0f;
+ } else {
+ playSpeed += 0.1f;
+ }
+ mPlayer.setPlaySpeed(playSpeed);
+ } break;
+ case KeyEvent.VK_M: {
+ float audioVolume = mPlayer.getAudioVolume();
+ if( audioVolume > 0.5f ) {
+ audioVolume = 0f;
+ } else {
+ audioVolume = 1f;
+ }
+ mPlayer.setAudioVolume(audioVolume);
+ } break;
+ }
+
+ if( 0 != pts1 ) {
+ mPlayer.seek(pts1);
+ }
+ } };
+
+    /** User needs to call {@link #initStream(URI, int, int, int)} afterwards. */
+ public MovieSBSStereo() throws IllegalStateException {
+ mPlayerScaleOrig = false;
+ mPlayer = GLMediaPlayerFactory.createDefault();
+ mPlayer.attachObject(PLAYER, this);
+ System.out.println("pC.1a "+mPlayer);
+ }
+
+ public void initStream(final URI streamLoc, final int vid, final int aid, final int textureCount) {
+ mPlayer.initStream(streamLoc, vid, aid, textureCount);
+ System.out.println("pC.1b "+mPlayer);
+ }
+
+ public GLMediaPlayer getGLMediaPlayer() { return mPlayer; }
+
+ public void setScaleOrig(final boolean v) {
+ mPlayerScaleOrig = v;
+ }
+
+ public void setStereoClientRenderer(final StereoClientRenderer scr) {
+ stereoClientRenderer = scr;
+ }
+ public StereoClientRenderer getStereoClientRenderer() { return stereoClientRenderer; }
+
+ public void resetGLState() {
+ resetGLState = true;
+ }
+
+ private void initShader(final GL2ES2 gl) {
+ // Create & Compile the shader objects
+ final ShaderCode rsVp = ShaderCode.create(gl, GL2ES2.GL_VERTEX_SHADER, MovieSBSStereo.class,
+ "../shader", "../shader/bin", shaderBasename, true);
+ final ShaderCode rsFp = ShaderCode.create(gl, GL2ES2.GL_FRAGMENT_SHADER, MovieSBSStereo.class,
+ "../shader", "../shader/bin", shaderBasename, true);
+
+ boolean preludeGLSLVersion = true;
+ if( GLES2.GL_TEXTURE_EXTERNAL_OES == mPlayer.getTextureTarget() ) {
+ if( !gl.isExtensionAvailable(GLExtensions.OES_EGL_image_external) ) {
+ throw new GLException(GLExtensions.OES_EGL_image_external+" requested but not available");
+ }
+ if( Platform.OSType.ANDROID == Platform.getOSType() && gl.isGLES3() ) {
+ // Bug on Nexus 10, ES3 - Android 4.3, where
+ // GL_OES_EGL_image_external extension directive leads to a failure _with_ '#version 300 es' !
+ // P0003: Extension 'GL_OES_EGL_image_external' not supported
+ preludeGLSLVersion = false;
+ }
+ }
+ rsVp.defaultShaderCustomization(gl, preludeGLSLVersion, true);
+
+ int rsFpPos = preludeGLSLVersion ? rsFp.addGLSLVersion(gl) : 0;
+ rsFpPos = rsFp.insertShaderSource(0, rsFpPos, mPlayer.getRequiredExtensionsShaderStub());
+ rsFpPos = rsFp.addDefaultShaderPrecision(gl, rsFpPos);
+
+ final String texLookupFuncName = mPlayer.getTextureLookupFunctionName(myTextureLookupName);
+ rsFp.replaceInShaderSource(myTextureLookupName, texLookupFuncName);
+
+ // Inject TextureSequence shader details
+ final StringBuilder sFpIns = new StringBuilder();
+ sFpIns.append("uniform ").append(mPlayer.getTextureSampler2DType()).append(" mgl_ActiveTexture;\n");
+ sFpIns.append(mPlayer.getTextureLookupFragmentShaderImpl());
+ rsFp.insertShaderSource(0, "TEXTURE-SEQUENCE-CODE-BEGIN", 0, sFpIns);
+
+ // Create & Link the shader program
+ final ShaderProgram sp = new ShaderProgram();
+ sp.add(rsVp);
+ sp.add(rsFp);
+ if(!sp.link(gl, System.err)) {
+ throw new GLException("Couldn't link program: "+sp);
+ }
+
+ // Let's manage all our states using ShaderState.
+ st = new ShaderState();
+ st.attachShaderProgram(gl, sp, false);
+ }
+
+ @Override
+ public void init(final GLAutoDrawable drawable) {
+ if(null == mPlayer) {
+ throw new InternalError("mPlayer null");
+ }
+ if( GLMediaPlayer.State.Uninitialized == mPlayer.getState() ) {
+ throw new IllegalStateException("mPlayer in uninitialized state: "+mPlayer);
+ }
+ final boolean hasVideo = GLMediaPlayer.STREAM_ID_NONE != mPlayer.getVID();
+ resetGLState = false;
+
+ zoom0 = -2.1f;
+ zoom1 = -5f;
+ zoom = 0f;
+
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ System.err.println(JoglVersion.getGLInfo(gl, null));
+ System.err.println("Alpha: "+alpha+", opaque "+drawable.getChosenGLCapabilities().isBackgroundOpaque()+
+ ", "+drawable.getClass().getName()+", "+drawable);
+
+ if(waitForKey) {
+ UITestCase.waitForKey("Init>");
+ }
+ final Texture tex;
+ try {
+ System.out.println("p0 "+mPlayer);
+ if(GLMediaPlayer.State.Initialized == mPlayer.getState() ) {
+ mPlayer.initGL(gl);
+ }
+ System.out.println("p1 "+mPlayer);
+ final TextureFrame frame = mPlayer.getLastTexture();
+ if( null != frame ) {
+ if( !hasVideo ) {
+ throw new InternalError("XXX: "+mPlayer);
+ }
+ tex = frame.getTexture();
+ if( null == tex ) {
+ throw new InternalError("XXX: "+mPlayer);
+ }
+ } else {
+ tex = null;
+ if( hasVideo ) {
+ throw new InternalError("XXX: "+mPlayer);
+ }
+ }
+ mPlayer.setTextureMinMagFilter( new int[] { GL.GL_NEAREST, GL.GL_NEAREST } );
+ } catch (final Exception glex) {
+ glex.printStackTrace();
+ if(null != mPlayer) {
+ mPlayer.destroy(gl);
+ mPlayer = null;
+ }
+ throw new GLException(glex);
+ }
+
+ if( hasVideo ) {
+ initShader(gl);
+
+ // Push the 1st uniform down the path
+ st.useProgram(gl, true);
+
+ final int[] viewPort = new int[] { 0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight()};
+ pmvMatrix = new PMVMatrix();
+ reshapePMV(viewPort[2], viewPort[3]);
+ pmvMatrixUniform = new GLUniformData("mgl_PMVMatrix", 4, 4, pmvMatrix.glGetPMvMatrixf());
+ if(!st.uniform(gl, pmvMatrixUniform)) {
+ throw new GLException("Error setting PMVMatrix in shader: "+st);
+ }
+ if(!st.uniform(gl, new GLUniformData("mgl_ActiveTexture", mPlayer.getTextureUnit()))) {
+ throw new GLException("Error setting mgl_ActiveTexture in shader: "+st);
+ }
+
+ final float dWidth = drawable.getSurfaceWidth();
+ final float dHeight = drawable.getSurfaceHeight();
+ final float mWidth = mPlayer.getWidth();
+ final float mHeight = mPlayer.getHeight();
+ final float mAspect = mWidth/mHeight;
+ System.err.println("XXX0: mov aspect: "+mAspect);
+ float xs, ys;
+ if(mPlayerScaleOrig && mWidth < dWidth && mHeight < dHeight) {
+ xs = mAspect * ( mWidth / dWidth ) ; ys = xs / mAspect ;
+ } else {
+ xs = mAspect; ys = 1f; // b>h
+ }
+ verts = new float[] { -1f*xs, -1f*ys, 0f, // LB
+ 1f*xs, 1f*ys, 0f // RT
+ };
+ {
+ System.err.println("XXX0: pixel LB: "+verts[0]+", "+verts[1]+", "+verts[2]);
+ System.err.println("XXX0: pixel RT: "+verts[3]+", "+verts[4]+", "+verts[5]);
+ final float[] winLB = new float[3];
+ final float[] winRT = new float[3];
+ pmvMatrix.gluProject(verts[0], verts[1], verts[2], viewPort, 0, winLB, 0);
+ pmvMatrix.gluProject(verts[3], verts[4], verts[5], viewPort, 0, winRT, 0);
+ System.err.println("XXX0: win LB: "+winLB[0]+", "+winLB[1]+", "+winLB[2]);
+ System.err.println("XXX0: win RT: "+winRT[0]+", "+winRT[1]+", "+winRT[2]);
+ }
+
+ interleavedVBOLeft = GLArrayDataServer.createGLSLInterleaved(3+4+2, GL.GL_FLOAT, false, 3*4, GL.GL_STATIC_DRAW);
+ {
+ interleavedVBOLeft.addGLSLSubArray("mgl_Vertex", 3, GL.GL_ARRAY_BUFFER);
+ interleavedVBOLeft.addGLSLSubArray("mgl_Color", 4, GL.GL_ARRAY_BUFFER);
+ interleavedVBOLeft.addGLSLSubArray("mgl_MultiTexCoord", 2, GL.GL_ARRAY_BUFFER);
+ }
+ interleavedVBORight = GLArrayDataServer.createGLSLInterleaved(3+4+2, GL.GL_FLOAT, false, 3*4, GL.GL_STATIC_DRAW);
+ {
+ interleavedVBORight.addGLSLSubArray("mgl_Vertex", 3, GL.GL_ARRAY_BUFFER);
+ interleavedVBORight.addGLSLSubArray("mgl_Color", 4, GL.GL_ARRAY_BUFFER);
+ interleavedVBORight.addGLSLSubArray("mgl_MultiTexCoord", 2, GL.GL_ARRAY_BUFFER);
+ }
+ updateInterleavedVBO(gl, interleavedVBOLeft, tex, 0);
+ updateInterleavedVBO(gl, interleavedVBORight, tex, 1);
+
+ st.ownAttribute(interleavedVBOLeft, true);
+ st.ownAttribute(interleavedVBORight, true);
+ gl.glClearColor(0.3f, 0.3f, 0.3f, 0.3f);
+
+ gl.glEnable(GL.GL_DEPTH_TEST);
+
+ st.useProgram(gl, false);
+
+ // Let's show the completed shader state ..
+ System.out.println("iVBOLeft : "+interleavedVBOLeft);
+ System.out.println("iVBORight: "+interleavedVBORight);
+ System.out.println(st);
+ }
+
+ mPlayer.play();
+ System.out.println("play.0 "+mPlayer);
+ startTime = System.currentTimeMillis();
+
+ final Object upstreamWidget = drawable.getUpstreamWidget();
+ if (upstreamWidget instanceof Window) {
+ final Window window = (Window) upstreamWidget;
+ window.addMouseListener(mouseAction);
+ window.addKeyListener(keyAction);
+ surfWidth = window.getSurfaceWidth();
+ surfHeight = window.getSurfaceHeight();
+ }
+ final int rmode = drawable.getChosenGLCapabilities().getSampleBuffers() ? 0 : Region.VBAA_RENDERING_BIT;
+ final boolean lowPerfDevice = gl.isGLES();
+ textRendererGLEL = new InfoTextRendererGLELBase(rmode, lowPerfDevice);
+ textRendererGLEL.init(drawable);
+ }
+
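+    /**
+     * Fills the per-eye interleaved VBO (vertex, color, texcoord):
+     * eye 0 samples the left half of the SBS texture, eye 1 the right half.
+     */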
+ protected void updateInterleavedVBO(final GL gl, final GLArrayDataServer iVBO, final Texture tex, final int eyeNum) {
+ final boolean wasEnabled = iVBO.enabled();
+ iVBO.seal(gl, false);
+ iVBO.rewind();
+ {
+ final FloatBuffer ib = (FloatBuffer)iVBO.getBuffer();
+ final TextureCoords tc = tex.getImageTexCoords();
+ final float texHalfWidth = tc.right()/2f;
+ System.err.println("XXX0: "+tc+", texHalfWidth "+texHalfWidth);
+ System.err.println("XXX0: tex aspect: "+tex.getAspectRatio());
+ System.err.println("XXX0: tex y-flip: "+tex.getMustFlipVertically());
+
+ // left-bottom
+ ib.put(verts[0]); ib.put(verts[1]); ib.put(verts[2]);
+ ib.put( 1); ib.put( 1); ib.put( 1); ib.put(alpha);
+ if( 0 == eyeNum ) {
+ ib.put( tc.left() ); ib.put( tc.bottom() );
+ } else {
+ ib.put( tc.left() + texHalfWidth ); ib.put( tc.bottom() );
+ }
+
+ // right-bottom
+ ib.put(verts[3]); ib.put(verts[1]); ib.put(verts[2]);
+ ib.put( 1); ib.put( 1); ib.put( 1); ib.put(alpha);
+ if( 0 == eyeNum ) {
+ ib.put( texHalfWidth ); ib.put( tc.bottom() );
+ } else {
+ ib.put( tc.right() ); ib.put( tc.bottom() );
+ }
+
+ // left-top
+ ib.put(verts[0]); ib.put(verts[4]); ib.put(verts[2]);
+ ib.put( 1); ib.put( 1); ib.put( 1); ib.put(alpha);
+ if( 0 == eyeNum ) {
+ ib.put( tc.left() ); ib.put( tc.top() );
+ } else {
+ ib.put( tc.left() + texHalfWidth ); ib.put( tc.top() );
+ }
+
+ // right-top
+ ib.put(verts[3]); ib.put(verts[4]); ib.put(verts[2]);
+ ib.put( 1); ib.put( 1); ib.put( 1); ib.put(alpha);
+ if( 0 == eyeNum ) {
+ ib.put( texHalfWidth ); ib.put( tc.top() );
+ } else {
+ ib.put( tc.right() ); ib.put( tc.top() );
+ }
+ }
+ iVBO.seal(gl, true);
+ if( !wasEnabled ) {
+ iVBO.enableBuffer(gl, false);
+ }
+ }
+
+ @Override
+ public void reshape(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ surfWidth = width;
+ surfHeight = height;
+
+ if(null == mPlayer) { return; }
+
+ if(null != st) {
+ reshapePMV(width, height);
+ st.useProgram(gl, true);
+ st.uniform(gl, pmvMatrixUniform);
+ st.useProgram(gl, false);
+ }
+
+ System.out.println("pR "+mPlayer);
+ textRendererGLEL.reshape(drawable, x, y, width, height);
+ }
+
+ private final float zNear = 0.1f;
+ private final float zFar = 10000f;
+
+ private void reshapePMV(final int width, final int height) {
+ pmvMatrix.glMatrixMode(GLMatrixFunc.GL_PROJECTION);
+ pmvMatrix.glLoadIdentity();
+ pmvMatrix.gluPerspective(45.0f, (float)width / (float)height, zNear, zFar);
+
+ pmvMatrix.glMatrixMode(GLMatrixFunc.GL_MODELVIEW);
+ pmvMatrix.glLoadIdentity();
+ pmvMatrix.glTranslatef(0, 0, zoom0);
+ }
+
+ private final float[] mat4Tmp1 = new float[16];
+ private final float[] mat4Tmp2 = new float[16];
+ private final float[] vec3Tmp1 = new float[3];
+ private final float[] vec3Tmp2 = new float[3];
+ private final float[] vec3Tmp3 = new float[3];
+
+ GLArrayDataServer interleavedVBOCurrent = null;
+
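+    /**
+     * Per-eye reshape: selects the eye's VBO, builds the projection matrix from the eye's
+     * FOV half-tangents and the modelview from the eye pose plus the per-eye pupil offsets.
+     */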
+ @Override
+ public void reshapeForEye(final GLAutoDrawable drawable, final int x, final int y, final int width, final int height,
+ final EyeParameter eyeParam, final EyePose eyePose) {
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ interleavedVBOCurrent = 0 == eyeParam.number ? interleavedVBOLeft : interleavedVBORight;
+
+ surfWidth = drawable.getSurfaceWidth();
+ surfHeight = drawable.getSurfaceHeight();
+
+ if(null == mPlayer) { return; }
+ if(null == st) { return; }
+
+ pmvMatrix.glMatrixMode(GLMatrixFunc.GL_PROJECTION);
+ final float[] mat4Projection = FloatUtil.makePerspective(mat4Tmp1, 0, true, eyeParam.fovhv, zNear, zFar);
+ pmvMatrix.glLoadMatrixf(mat4Projection, 0);
+
+ pmvMatrix.glMatrixMode(GLMatrixFunc.GL_MODELVIEW);
+ final Quaternion rollPitchYaw = new Quaternion();
+ final float[] shiftedEyePos = rollPitchYaw.rotateVector(vec3Tmp1, 0, eyePose.position, 0);
+ VectorUtil.addVec3(shiftedEyePos, shiftedEyePos, eyeParam.positionOffset);
+
+ rollPitchYaw.mult(eyePose.orientation);
+ final float[] up = rollPitchYaw.rotateVector(vec3Tmp2, 0, VectorUtil.VEC3_UNIT_Y, 0);
+ final float[] forward = rollPitchYaw.rotateVector(vec3Tmp3, 0, VectorUtil.VEC3_UNIT_Z_NEG, 0);
+ final float[] center = VectorUtil.addVec3(forward, shiftedEyePos, forward);
+
+ final float[] mLookAt = FloatUtil.makeLookAt(mat4Tmp1, 0, shiftedEyePos, 0, center, 0, up, 0, mat4Tmp2);
+ final float[] mViewAdjust = FloatUtil.makeTranslation(mat4Tmp2, true, eyeParam.distNoseToPupilX, eyeParam.distMiddleToPupilY, eyeParam.eyeReliefZ);
+ final float[] mat4Modelview = FloatUtil.multMatrix(mViewAdjust, mLookAt);
+ pmvMatrix.glLoadMatrixf(mat4Modelview, 0);
+ pmvMatrix.glTranslatef(0, 0, zoom0);
+ st.useProgram(gl, true);
+ st.uniform(gl, pmvMatrixUniform);
+ st.useProgram(gl, false);
+ textRendererGLEL.reshape(drawable, x, y, width, height);
+ }
+
+ @Override
+ public void dispose(final GLAutoDrawable drawable) {
+ textRendererGLEL.dispose(drawable);
+ textRendererGLEL = null;
+ disposeImpl(drawable, true);
+ }
+
+ private void disposeImpl(final GLAutoDrawable drawable, final boolean disposePlayer) {
+ if(null == mPlayer) { return; }
+
+ final Object upstreamWidget = drawable.getUpstreamWidget();
+ if (upstreamWidget instanceof Window) {
+ final Window window = (Window) upstreamWidget;
+ window.removeMouseListener(mouseAction);
+ window.removeKeyListener(keyAction);
+ }
+
+ System.out.println("pD.1 "+mPlayer+", disposePlayer "+disposePlayer);
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ if( disposePlayer ) {
+ mPlayer.destroy(gl);
+ System.out.println("pD.X "+mPlayer);
+ mPlayer=null;
+ }
+ pmvMatrixUniform = null;
+ if(null != pmvMatrix) {
+ pmvMatrix=null;
+ }
+ if(null != st) {
+ st.destroy(gl);
+ st=null;
+ }
+ }
+
+ long lastPerfPos = 0;
+
+ @Override
+ public void display(final GLAutoDrawable drawable) {
+ display(drawable, 0);
+ }
+
+ @Override
+ public void display(final GLAutoDrawable drawable, final int flags) {
+        // Render the movie quad for the current eye using the last (repeated) or next texture frame.
+ final boolean repeatedFrame = 0 != ( CustomGLEventListener.DISPLAY_REPEAT & flags );
+ final boolean dontClear = 0 != ( CustomGLEventListener.DISPLAY_DONTCLEAR & flags );
+ final GLArrayDataServer iVBO = null != interleavedVBOCurrent ? interleavedVBOCurrent : interleavedVBOLeft;
+
+ final GL2ES2 gl = drawable.getGL().getGL2ES2();
+ if(null == mPlayer) { return; }
+
+ if( resetGLState ) {
+ resetGLState = false;
+ System.err.println("XXX resetGLState");
+ disposeImpl(drawable, false);
+ init(drawable);
+ reshape(drawable, 0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight());
+ }
+
+ final long currentPos = System.currentTimeMillis();
+ if( currentPos - lastPerfPos > 2000 ) {
+ System.err.println( mPlayer.getPerfString() );
+ lastPerfPos = currentPos;
+ }
+
+ if( !dontClear ) {
+ gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT);
+ }
+
+ if(null == st) {
+ return;
+ }
+
+ st.useProgram(gl, true);
+
+ pmvMatrix.glMatrixMode(GLMatrixFunc.GL_MODELVIEW);
+ pmvMatrix.glPushMatrix();
+ pmvMatrix.glTranslatef(0, 0, zoom);
+ if(rotate > 0) {
+ final float ang = ((System.currentTimeMillis() - startTime) * 360.0f) / 8000.0f;
+ pmvMatrix.glRotatef(ang, 0, 0, 1);
+ } else {
+ rotate = 0;
+ }
+ st.uniform(gl, pmvMatrixUniform);
+ iVBO.enableBuffer(gl, true);
+ Texture tex = null;
+ if(null!=mPlayer) {
+ final TextureSequence.TextureFrame texFrame;
+ if( repeatedFrame ) {
+ texFrame=mPlayer.getLastTexture();
+ } else {
+ texFrame=mPlayer.getNextTexture(gl);
+ }
+ if(null != texFrame) {
+ tex = texFrame.getTexture();
+ gl.glActiveTexture(GL.GL_TEXTURE0+mPlayer.getTextureUnit());
+ tex.enable(gl);
+ tex.bind(gl);
+ }
+ }
+ gl.glDrawArrays(GL.GL_TRIANGLE_STRIP, 0, 4);
+ if(null != tex) {
+ tex.disable(gl);
+ }
+ iVBO.enableBuffer(gl, false);
+ st.useProgram(gl, false);
+ pmvMatrix.glPopMatrix();
+
+ textRendererGLEL.display(drawable);
+ }
+
+ static class StereoGLMediaEventListener implements GLMediaEventListener {
+ void destroyWindow(final Window window) {
+ new Thread() {
+ public void run() {
+ window.destroy();
+ } }.start();
+ }
+
+ @Override
+ public void newFrameAvailable(final GLMediaPlayer ts, final TextureFrame newFrame, final long when) {
+ }
+
+ @Override
+ public void attributesChanged(final GLMediaPlayer mp, final int event_mask, final long when) {
+ System.err.println("MovieSimple AttributesChanges: events_mask 0x"+Integer.toHexString(event_mask)+", when "+when);
+ System.err.println("MovieSimple State: "+mp);
+ final GLWindow window = (GLWindow) mp.getAttachedObject(WINDOW_KEY);
+ final MovieSBSStereo ms = (MovieSBSStereo)mp.getAttachedObject(PLAYER);
+ final StereoClientRenderer stereoClientRenderer = (StereoClientRenderer) mp.getAttachedObject(STEREO_RENDERER_KEY);
+
+ if( 0 != ( GLMediaEventListener.EVENT_CHANGE_SIZE & event_mask ) ) {
+ System.err.println("MovieSimple State: CHANGE_SIZE");
+ // window.disposeGLEventListener(ms, false /* remove */ );
+ ms.resetGLState();
+ }
+ if( 0 != ( GLMediaEventListener.EVENT_CHANGE_INIT & event_mask ) ) {
+ System.err.println("MovieSimple State: INIT");
+ // Use GLEventListener in all cases [A+V, V, A]
+ stereoClientRenderer.addGLEventListener(ms);
+ final GLAnimatorControl anim = window.getAnimator();
+ anim.setUpdateFPSFrames(60, null);
+ anim.resetFPSCounter();
+ ms.setStereoClientRenderer(stereoClientRenderer);
+ }
+ if( 0 != ( GLMediaEventListener.EVENT_CHANGE_PLAY & event_mask ) ) {
+ window.getAnimator().resetFPSCounter();
+ }
+
+ boolean destroy = false;
+ Throwable err = null;
+
+ if( 0 != ( GLMediaEventListener.EVENT_CHANGE_EOS & event_mask ) ) {
+ err = ms.mPlayer.getStreamException();
+ if( null != err ) {
+ System.err.println("MovieSimple State: EOS + Exception");
+ destroy = true;
+ } else {
+ System.err.println("MovieSimple State: EOS");
+ new Thread() {
+ public void run() {
+ mp.setPlaySpeed(1f);
+ mp.seek(0);
+ mp.play();
+ }
+ }.start();
+ }
+ }
+ if( 0 != ( GLMediaEventListener.EVENT_CHANGE_ERR & event_mask ) ) {
+ err = ms.mPlayer.getStreamException();
+ if( null != err ) {
+ System.err.println("MovieSimple State: ERR + Exception");
+ } else {
+ System.err.println("MovieSimple State: ERR");
+ }
+ destroy = true;
+ }
+ if( destroy ) {
+ if( null != err ) {
+ err.printStackTrace();
+ }
+ destroyWindow(window);
+ }
+ }
+ };
+ public final static StereoGLMediaEventListener stereoGLMediaEventListener = new StereoGLMediaEventListener();
+}
diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java
index 8760cd2c3..5918e4e6b 100644
--- a/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java
+++ b/src/test/com/jogamp/opengl/test/junit/jogl/demos/es2/av/MovieSimple.java
@@ -90,8 +90,8 @@ public class MovieSimple implements GLEventListener {
public static final int EFFECT_GRADIENT_BOTTOM2TOP = 1<<1;
public static final int EFFECT_TRANSPARENT = 1<<3;
- private static final String WINDOW_KEY = "window";
- private static final String PLAYER = "player";
+ public static final String WINDOW_KEY = "window";
+ public static final String PLAYER = "player";
private static boolean waitForKey = false;
private int surfWidth, surfHeight;
@@ -682,6 +682,9 @@ public class MovieSimple implements GLEventListener {
System.out.println("pR "+mPlayer);
}
+ private final float zNear = 1f;
+ private final float zFar = 10f;
+
private void reshapePMV(final int width, final int height) {
pmvMatrix.glMatrixMode(GLMatrixFunc.GL_PROJECTION);
pmvMatrix.glLoadIdentity();
@@ -691,7 +694,7 @@ public class MovieSimple implements GLEventListener {
pmvMatrix.glOrthof(-fw, fw, -fh, fh, -1.0f, 1.0f);
nearPlaneNormalized = 0f;
} else {
- pmvMatrix.gluPerspective(45.0f, (float)width / (float)height, 1f, 10.0f);
+ pmvMatrix.gluPerspective(45.0f, (float)width / (float)height, zNear, zFar);
nearPlaneNormalized = 1f/(10f-1f);
}
System.err.println("XXX0: Perspective nearPlaneNormalized: "+nearPlaneNormalized);
@@ -786,7 +789,7 @@ public class MovieSimple implements GLEventListener {
Texture tex = null;
if(null!=mPlayer) {
final TextureSequence.TextureFrame texFrame;
- if(mPlayerShared) {
+ if( mPlayerShared ) {
texFrame=mPlayer.getLastTexture();
} else {
texFrame=mPlayer.getNextTexture(gl);
@@ -903,7 +906,7 @@ public class MovieSimple implements GLEventListener {
}
}
};
- final static MyGLMediaEventListener myGLMediaEventListener = new MyGLMediaEventListener();
+ public final static MyGLMediaEventListener myGLMediaEventListener = new MyGLMediaEventListener();
static boolean loopEOS = false;
static boolean origSize;
@@ -1068,4 +1071,5 @@ public class MovieSimple implements GLEventListener {
mss[i].initStream(streamLocN, vid, aid, textureCount);
}
}
+
}
diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java b/src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java
new file mode 100644
index 000000000..c4db20f37
--- /dev/null
+++ b/src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java
@@ -0,0 +1,307 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package com.jogamp.opengl.test.junit.jogl.stereo;
+
+import java.io.File;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.PointImmutable;
+import javax.media.opengl.GL;
+import javax.media.opengl.GLCapabilities;
+import javax.media.opengl.GLProfile;
+
+import com.jogamp.common.util.IOUtil;
+import com.jogamp.common.util.ReflectionUtil;
+import com.jogamp.newt.event.KeyAdapter;
+import com.jogamp.newt.event.KeyEvent;
+import com.jogamp.newt.opengl.GLWindow;
+import com.jogamp.opengl.math.FovHVHalves;
+import com.jogamp.opengl.test.junit.jogl.demos.es2.GearsES2;
+import com.jogamp.opengl.test.junit.jogl.demos.es2.av.MovieSBSStereo;
+import com.jogamp.opengl.test.junit.jogl.demos.es2.av.MovieSimple;
+import com.jogamp.opengl.test.junit.util.MiscUtils;
+import com.jogamp.opengl.test.junit.util.QuitAdapter;
+import com.jogamp.opengl.util.Animator;
+import com.jogamp.opengl.util.AnimatorBase;
+import com.jogamp.opengl.util.av.GLMediaPlayer;
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
+import com.jogamp.opengl.util.stereo.StereoDeviceFactory;
+import com.jogamp.opengl.util.stereo.StereoClientRenderer;
+import com.jogamp.opengl.util.stereo.StereoGLEventListener;
+
+/**
+ * All distortions, no multisampling, bilinear filtering, manual-swap and using two FBOs (default, good)
+ * <pre>
+ * java StereoDemo01 -time 10000000
+ * </pre>
+ * All distortions, 8x multisampling, bilinear filtering, manual-swap and using two FBOs (best - slowest)
+ * <pre>
+ * java StereoDemo01 -time 10000000 -samples 8
+ * </pre>
+ * All distortions, 8x multisampling, bilinear filtering, manual-swap and using a single big FBO (w/ all command-line params)
+ * <pre>
+ * java StereoDemo01 -time 10000000 -vignette true -chromatic true -timewarp false -samples 8 -biLinear true -autoSwap false -singleFBO true -mainScreen false
+ * </pre>
+ * No distortions, no multisampling, no filtering, auto-swap and using a single big FBO (worst and fastest)
+ * <pre>
+ * java StereoDemo01 -time 10000000 -vignette false -chromatic false -timewarp false -samples 0 -biLinear false -autoSwap true -singleFBO true
+ * </pre>
+ * Test on main screen:
+ * <pre>
+ * java StereoDemo01 -time 10000000 -mainScreen true
+ * </pre>
+ * Test a 3D SBS Movie:
+ * <pre>
+ * java StereoDemo01 -time 10000000 -filmFile Some_SBS_3D_Movie.mkv
+ * java StereoDemo01 -time 10000000 -filmURI http://whoknows.not/Some_SBS_3D_Movie.mkv
+ * </pre>
+ * <p>
+ * Key 'R' enables/disables the VR sensors, i.e. head rotation tracking.
+ * </p>
+ *
+ */
+public class StereoDemo01 {
+ static long duration = 10000; // ms
+
+ static boolean useStereoScreen = true;
+
+ static int numSamples = 0;
+ static boolean biLinear = true;
+ static boolean useSingleFBO = false;
+ static boolean useVignette = true;
+ static boolean useChromatic = true;
+ static boolean useTimewarp = true;
+ static boolean useAutoSwap = false;
+ static String useFilmFile = null;
+ static String useFilmURI = null;
+ static String stereoRendererListenerName = null;
+
+ public static void main(final String args[]) throws InterruptedException, URISyntaxException {
+ for(int i=0; i<args.length; i++) {
+ if(args[i].equals("-time")) {
+ i++;
+ duration = MiscUtils.atol(args[i], duration);
+ } else if(args[i].equals("-samples")) {
+ i++;
+ numSamples = MiscUtils.atoi(args[i], numSamples);
+ } else if(args[i].equals("-biLinear")) {
+ i++;
+ biLinear = MiscUtils.atob(args[i], biLinear);
+ } else if(args[i].equals("-singleFBO")) {
+ i++;
+ useSingleFBO = MiscUtils.atob(args[i], useSingleFBO);
+ } else if(args[i].equals("-vignette")) {
+ i++;
+ useVignette = MiscUtils.atob(args[i], useVignette);
+ } else if(args[i].equals("-chromatic")) {
+ i++;
+ useChromatic = MiscUtils.atob(args[i], useChromatic);
+ } else if(args[i].equals("-timewarp")) {
+ i++;
+ useTimewarp = MiscUtils.atob(args[i], useTimewarp);
+ } else if(args[i].equals("-vignette")) {
+ i++;
+ useVignette = MiscUtils.atob(args[i], useVignette);
+ } else if(args[i].equals("-mainScreen")) {
+ i++;
+ useStereoScreen = !MiscUtils.atob(args[i], useStereoScreen);
+ } else if(args[i].equals("-autoSwap")) {
+ i++;
+ useAutoSwap = MiscUtils.atob(args[i], useAutoSwap);
+ } else if(args[i].equals("-test")) {
+ i++;
+ stereoRendererListenerName = args[i];
+ } else if(args[i].equals("-filmFile")) {
+ i++;
+ useFilmFile = args[i];
+ } else if(args[i].equals("-filmURI")) {
+ i++;
+ useFilmURI = args[i];
+ }
+ }
+ if( null != stereoRendererListenerName ) {
+ try {
+ final Object stereoRendererListener = ReflectionUtil.createInstance(stereoRendererListenerName, null);
+ } catch (final Exception e) {
+ e.printStackTrace();
+ }
+ }
+ final StereoGLEventListener upstream;
+ final MovieSBSStereo movieSimple;
+ final URI movieURI;
+ if( null != useFilmFile ) {
+ movieSimple = new MovieSBSStereo();
+ movieURI = IOUtil.toURISimple(new File(useFilmFile));
+ upstream = movieSimple;
+ } else if( null != useFilmURI ) {
+ movieSimple = new MovieSBSStereo();
+ movieURI = new URI(useFilmURI);
+ upstream = movieSimple;
+ } else {
+ final GearsES2 demo = new GearsES2(0);
+ demo.setVerbose(false);
+ upstream = demo;
+ movieSimple = null;
+ movieURI = null;
+ }
+ final StereoDemo01 demo01 = new StereoDemo01();
+ demo01.doIt(0, upstream, movieSimple, movieURI, biLinear, numSamples, useSingleFBO, useVignette, useChromatic, useTimewarp,
+ useAutoSwap, true /* useAnimator */, false /* exclusiveContext*/);
+ }
+
+ public void doIt(final int stereoDeviceIndex,
+ final StereoGLEventListener upstream, final MovieSBSStereo movieSimple, final URI movieURI,
+ final boolean biLinear, final int numSamples, final boolean useSingleFBO,
+ final boolean useVignette, final boolean useChromatic, final boolean useTimewarp,
+ final boolean useAutoSwap, final boolean useAnimator, final boolean exclusiveContext) throws InterruptedException {
+
+ System.err.println("glob duration "+duration);
+ System.err.println("glob useStereoScreen "+useStereoScreen);
+ System.err.println("biLinear "+biLinear);
+ System.err.println("numSamples "+numSamples);
+ System.err.println("useSingleFBO "+useSingleFBO);
+ System.err.println("useVignette "+useVignette);
+ System.err.println("useChromatic "+useChromatic);
+ System.err.println("useTimewarp "+useTimewarp);
+ System.err.println("useAutoSwap "+useAutoSwap);
+
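+        // Vendor-agnostic stereo pipeline: StereoDeviceFactory -> StereoDevice -> StereoDeviceRenderer
+        // -> StereoClientRenderer, which drives the upstream StereoGLEventListener (demo or movie).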
+ final StereoDeviceFactory stereoDeviceFactory = StereoDeviceFactory.createDefaultFactory();
+ if( null == stereoDeviceFactory ) {
+ System.err.println("No StereoDeviceFactory available");
+ return;
+ }
+
+ final StereoDevice stereoDevice = stereoDeviceFactory.createDevice(stereoDeviceIndex, true /* verbose */);
+ if( null == stereoDevice ) {
+ System.err.println("No StereoDevice.Context available for index "+stereoDeviceIndex);
+ return;
+ }
+
+ // Start the sensor which provides the Rift’s pose and motion.
+ if( !stereoDevice.startSensors(true) ) {
+ System.err.println("Could not start sensors on device "+stereoDeviceIndex);
+ }
+
+ //
+ //
+ //
+ final GLCapabilities caps = new GLCapabilities(GLProfile.getMaxProgrammable(true /* favorHardwareRasterizer */));
+ final GLWindow window = GLWindow.create(caps);
+
+ final PointImmutable devicePos = stereoDevice.getPosition();
+ final DimensionImmutable deviceRes = stereoDevice.getSurfaceSize();
+ window.setSize(deviceRes.getWidth(), deviceRes.getHeight());
+ if( useStereoScreen ) {
+ window.setPosition(devicePos.getX(), devicePos.getY());
+ }
+ window.setAutoSwapBufferMode(useAutoSwap);
+ window.setUndecorated(true);
+
+ final Animator animator = useAnimator ? new Animator() : null;
+ if( useAnimator ) {
+ animator.setModeBits(false, AnimatorBase.MODE_EXPECT_AWT_RENDERING_THREAD);
+ animator.setExclusiveContext(exclusiveContext);
+ }
+
+ //
+ // Oculus Rift setup
+ //
+ // EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);
+ final FovHVHalves[] defaultEyeFov = stereoDevice.getDefaultFOV();
+ System.err.println("Default Fov[0]: "+defaultEyeFov[0]);
+ System.err.println("Default Fov[1]: "+defaultEyeFov[1]);
+
+ final float[] eyePositionOffset = null == movieSimple ? StereoDevice.DEFAULT_EYE_POSITION_OFFSET // default
+ : new float[] { 0f, 0.3f, 0f }; // better fixed movie position
+ final int textureUnit = 0;
+ final int distortionBits = ( useVignette ? StereoDeviceRenderer.DISTORTION_VIGNETTE : 0 ) |
+ ( useChromatic ? StereoDeviceRenderer.DISTORTION_CHROMATIC : 0 ) |
+ ( useTimewarp ? StereoDeviceRenderer.DISTORTION_TIMEWARP : 0 );
+ final float pixelsPerDisplayPixel = 1f;
+ final StereoDeviceRenderer stereoDeviceRenderer =
+ stereoDevice.createRenderer(distortionBits, useSingleFBO ? 1 : 2, eyePositionOffset,
+ defaultEyeFov, pixelsPerDisplayPixel, textureUnit);
+ System.err.println("StereoDeviceRenderer: "+stereoDeviceRenderer);
+
+ final int texFilter = biLinear ? GL.GL_LINEAR : GL.GL_NEAREST;
+ final StereoClientRenderer renderer = new StereoClientRenderer(stereoDeviceRenderer, true /* ownsDist */, texFilter, texFilter, numSamples);
+ if( null != movieSimple && null != movieURI) {
+ movieSimple.setScaleOrig(true);
+ final GLMediaPlayer mp = movieSimple.getGLMediaPlayer();
+ mp.attachObject(MovieSimple.WINDOW_KEY, window);
+ mp.attachObject(MovieSBSStereo.STEREO_RENDERER_KEY, renderer);
+ mp.addEventListener(MovieSBSStereo.stereoGLMediaEventListener);
+ movieSimple.initStream(movieURI, GLMediaPlayer.STREAM_ID_AUTO, GLMediaPlayer.STREAM_ID_AUTO, 3);
+ } else {
+ renderer.addGLEventListener(upstream);
+ }
+ window.addGLEventListener(renderer);
+
+ final QuitAdapter quitAdapter = new QuitAdapter();
+ window.addKeyListener(quitAdapter);
+ window.addWindowListener(quitAdapter);
+
+ window.addKeyListener(new KeyAdapter() {
+ public void keyReleased(final KeyEvent e) {
+ if( e.isAutoRepeat() ) {
+ return;
+ }
+ switch(e.getKeySymbol()) {
+ case KeyEvent.VK_R: {
+ stereoDevice.startSensors(!stereoDevice.getSensorsStarted());
+ break;
+ }
+ }
+ } } );
+
+ if( useAnimator ) {
+ animator.add(window);
+ animator.start();
+ }
+ window.setVisible(true);
+ if( useAnimator ) {
+ animator.setUpdateFPSFrames(60, System.err);
+ }
+
+ final long t0 = System.currentTimeMillis();
+ long t1 = t0;
+ while(!quitAdapter.shouldQuit() && t1-t0<duration) {
+ Thread.sleep(100);
+ t1 = System.currentTimeMillis();
+ }
+
+ if( useAnimator ) {
+ animator.stop();
+ }
+ window.destroy();
+ stereoDevice.dispose();
+ }
+}
diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/stereo/ovr/OVRDemo01.java b/src/test/com/jogamp/opengl/test/junit/jogl/stereo/ovr/OVRDemo01.java
deleted file mode 100644
index 1cc07ec22..000000000
--- a/src/test/com/jogamp/opengl/test/junit/jogl/stereo/ovr/OVRDemo01.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/**
- * Copyright 2014 JogAmp Community. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are
- * permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice, this list of
- * conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice, this list
- * of conditions and the following disclaimer in the documentation and/or other materials
- * provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
- * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
- * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
- * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- *
- * The views and conclusions contained in the software and documentation are those of the
- * authors and should not be interpreted as representing official policies, either expressed
- * or implied, of JogAmp Community.
- */
-package com.jogamp.opengl.test.junit.jogl.stereo.ovr;
-
-import javax.media.opengl.GL;
-import javax.media.opengl.GLCapabilities;
-import javax.media.opengl.GLEventListener;
-import javax.media.opengl.GLProfile;
-
-import jogamp.opengl.oculusvr.OVRDistortion;
-
-import com.jogamp.newt.opengl.GLWindow;
-import com.jogamp.oculusvr.OVR;
-import com.jogamp.oculusvr.OVRException;
-import com.jogamp.oculusvr.OVRVersion;
-import com.jogamp.oculusvr.OvrHmdContext;
-import com.jogamp.oculusvr.ovrFovPort;
-import com.jogamp.oculusvr.ovrHmdDesc;
-import com.jogamp.oculusvr.ovrSizei;
-import com.jogamp.oculusvr.ovrVector2i;
-import com.jogamp.opengl.oculusvr.OVRSBSRendererDualFBO;
-import com.jogamp.opengl.oculusvr.OVRSBSRendererSingleFBO;
-import com.jogamp.opengl.test.junit.jogl.demos.es2.GearsES2;
-import com.jogamp.opengl.test.junit.util.MiscUtils;
-import com.jogamp.opengl.test.junit.util.QuitAdapter;
-import com.jogamp.opengl.util.Animator;
-import com.jogamp.opengl.util.AnimatorBase;
-
-/**
- * All distortions, no multisampling, bilinear filtering, manual-swap and using two FBOs (default, good)
- * <pre>
- * java OVRDemo01 -time 10000000
- * </pre>
- * All distortions, 8x multisampling, bilinear filtering, manual-swap and using two FBOs (best - slowest)
- * <pre>
- * java OVRDemo01 -time 10000000 -samples 8
- * </pre>
- * All distortions, 8x multisampling, bilinear filtering, manual-swap and using a big single FBO (w/ all commandline params)
- * <pre>
- * java OVRDemo01 -time 10000000 -vignette true -chromatic true -timewarp false -samples 8 -biLinear true -autoSwap false -singleFBO true -mainScreen false
- * </pre>
- * No distortions, no multisampling, no filtering, auto-swap and using a big single FBO (worst and fastest)
- * <pre>
- * java OVRDemo01 -time 10000000 -vignette false -chromatic false -timewarp false -samples 0 -biLinear false -autoSwap true -singleFBO true
- * </pre>
- * Test on main screen:
- * <pre>
- * java OVRDemo01 -time 10000000 -mainScreen true
- * </pre>
- *
- */
-public class OVRDemo01 {
- static long duration = 10000; // ms
-
- static boolean useOVRScreen = true;
-
- static int numSamples = 0;
- static boolean biLinear = true;
- static boolean useSingleFBO = false;
- static boolean useVignette = true;
- static boolean useChromatic = true;
- static boolean useTimewarp = true;
- static boolean useAutoSwap = false;
-
- public static void main(final String args[]) throws InterruptedException {
- for(int i=0; i<args.length; i++) {
- if(args[i].equals("-time")) {
- i++;
- duration = MiscUtils.atol(args[i], duration);
- } else if(args[i].equals("-samples")) {
- i++;
- numSamples = MiscUtils.atoi(args[i], numSamples);
- } else if(args[i].equals("-biLinear")) {
- i++;
- biLinear = MiscUtils.atob(args[i], biLinear);
- } else if(args[i].equals("-singleFBO")) {
- i++;
- useSingleFBO = MiscUtils.atob(args[i], useSingleFBO);
- } else if(args[i].equals("-vignette")) {
- i++;
- useVignette = MiscUtils.atob(args[i], useVignette);
- } else if(args[i].equals("-chromatic")) {
- i++;
- useChromatic = MiscUtils.atob(args[i], useChromatic);
- } else if(args[i].equals("-timewarp")) {
- i++;
- useTimewarp = MiscUtils.atob(args[i], useTimewarp);
- } else if(args[i].equals("-vignette")) {
- i++;
- useVignette = MiscUtils.atob(args[i], useVignette);
- } else if(args[i].equals("-mainScreen")) {
- i++;
- useOVRScreen = !MiscUtils.atob(args[i], useOVRScreen);
- } else if(args[i].equals("-autoSwap")) {
- i++;
- useAutoSwap = MiscUtils.atob(args[i], useAutoSwap);
- }
- }
- final OVRDemo01 demo01 = new OVRDemo01();
- demo01.doIt(0, biLinear, numSamples, useSingleFBO, useVignette, useChromatic, useTimewarp,
- useAutoSwap, true /* useAnimator */, false /* exclusiveContext*/);
- }
-
- public void doIt(final int ovrHmdIndex, final boolean biLinear, final int numSamples,
- final boolean useSingleFBO,
- final boolean useVignette, final boolean useChromatic, final boolean useTimewarp,
- final boolean useAutoSwap,
- final boolean useAnimator, final boolean exclusiveContext) throws InterruptedException {
-
- System.err.println("glob duration "+duration);
- System.err.println("glob useOVRScreen "+useOVRScreen);
- System.err.println("biLinear "+biLinear);
- System.err.println("numSamples "+numSamples);
- System.err.println("useSingleFBO "+useSingleFBO);
- System.err.println("useVignette "+useVignette);
- System.err.println("useChromatic "+useChromatic);
- System.err.println("useTimewarp "+useTimewarp);
- System.err.println("useAutoSwap "+useAutoSwap);
-
- // Initialize LibOVR...
- if( !OVR.ovr_Initialize() ) { // recursive ..
- throw new OVRException("OVR not available");
- }
- final OvrHmdContext hmdCtx = OVR.ovrHmd_Create(ovrHmdIndex);
- if( null == hmdCtx ) {
- throw new OVRException("OVR HMD #"+ovrHmdIndex+" not available");
- }
- final ovrHmdDesc hmdDesc = ovrHmdDesc.create();
- OVR.ovrHmd_GetDesc(hmdCtx, hmdDesc);
- System.err.println(OVRVersion.getAvailableCapabilitiesInfo(hmdDesc, ovrHmdIndex, null).toString());
-
- // Start the sensor which provides the Rift’s pose and motion.
- final int requiredSensorCaps = 0;
- final int supportedSensorCaps = requiredSensorCaps | OVR.ovrSensorCap_Orientation | OVR.ovrSensorCap_YawCorrection | OVR.ovrSensorCap_Position;
- if( !OVR.ovrHmd_StartSensor(hmdCtx, supportedSensorCaps, requiredSensorCaps) ) {
- throw new OVRException("OVR HMD #"+ovrHmdIndex+" required sensors not available");
- }
-
- //
- //
- //
-
- final GLCapabilities caps = new GLCapabilities(GLProfile.getMaxProgrammable(true /* favorHardwareRasterizer */));
- final GLWindow window = GLWindow.create(caps);
- final ovrVector2i ovrPos = hmdDesc.getWindowsPos();
- final ovrSizei ovrRes = hmdDesc.getResolution();
- window.setSize(ovrRes.getW(), ovrRes.getH());
- if( useOVRScreen ) {
- window.setPosition(ovrPos.getX(), ovrPos.getY());
- }
- window.setAutoSwapBufferMode(useAutoSwap);
- window.setUndecorated(true);
-
- final Animator animator = useAnimator ? new Animator() : null;
- if( useAnimator ) {
- animator.setModeBits(false, AnimatorBase.MODE_EXPECT_AWT_RENDERING_THREAD);
- animator.setExclusiveContext(exclusiveContext);
- }
-
- //
- // Oculus Rift setup
- //
- final float[] eyePositionOffset = { 0.0f, 1.6f, -5.0f };
- // EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);
-
- final ovrFovPort[] defaultEyeFov = hmdDesc.getDefaultEyeFov(0, new ovrFovPort[2]);
- final int distortionCaps = ( useVignette ? OVR.ovrDistortionCap_Vignette : 0 ) |
- ( useChromatic ? OVR.ovrDistortionCap_Chromatic : 0 ) |
- ( useTimewarp ? OVR.ovrDistortionCap_TimeWarp : 0 );
- final float pixelsPerDisplayPixel = 1f;
- final OVRDistortion dist = OVRDistortion.create(hmdCtx, useSingleFBO, eyePositionOffset, defaultEyeFov, pixelsPerDisplayPixel, distortionCaps);
- System.err.println("OVRDistortion: "+dist);
-
- final int texFilter = biLinear ? GL.GL_LINEAR : GL.GL_NEAREST;
- final GearsES2 upstream = new GearsES2(0);
- upstream.setVerbose(false);
- final GLEventListener renderer;
- if( useSingleFBO ) {
- renderer = new OVRSBSRendererSingleFBO(dist, true /* ownsDist */, upstream, texFilter, texFilter, numSamples);
- } else {
- renderer = new OVRSBSRendererDualFBO(dist, true /* ownsDist */, upstream, texFilter, texFilter, numSamples);
- }
- window.addGLEventListener(renderer);
-
- final QuitAdapter quitAdapter = new QuitAdapter();
- window.addKeyListener(quitAdapter);
- window.addWindowListener(quitAdapter);
-
- if( useAnimator ) {
- animator.add(window);
- animator.start();
- }
- window.setVisible(true);
- if( useAnimator ) {
- animator.setUpdateFPSFrames(60, System.err);
- }
-
- final long t0 = System.currentTimeMillis();
- long t1 = t0;
- while(!quitAdapter.shouldQuit() && t1-t0<duration) {
- Thread.sleep(100);
- t1 = System.currentTimeMillis();
- }
-
- if( useAnimator ) {
- animator.stop();
- }
- window.destroy();
- // OVR.ovr_Shutdown();
- }
-}