author     Sven Gothel <[email protected]>  2014-07-07 23:46:19 +0200
committer  Sven Gothel <[email protected]>  2014-07-07 23:46:19 +0200
commit     38e51e4a5f6f35c658df10f6d48a33e3ffaea2f3 (patch)
tree       259024b16429986ab48fd49a9bd2667dad2b85eb /src
parent     06fc570f70dc5ccfad7399d8426bdf224c239a5a (diff)
Bug 1021: Add GenericStereoDevice*, supporting custom configurations; hook in oculusvr-sdk Java distortion-mesh calculation if available
StereoDeviceFactory supports the new GenericStereoDeviceFactory with its GenericStereoDevice and GenericStereoDeviceRenderer. GenericStereoDevice maintains different configurations, selected either by passing a GenericStereoDevice.Config instance directly or via the device-index parameter:
- 0: monoscopic device: no post-processing
- 1: stereoscopic device SBS: no post-processing
- 2: stereoscopic device SBS + lenses: distortion post-processing (only available w/ oculusvr-sdk sub-module)
A 'GenericStereoDevice.Config' instance is self-contained and may be extended to support more device types like top-bottom, interlaced etc. StereoDemo01 handles all use cases and may be used as a test-bed to add and experiment with stereoscopy, devices and settings.
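For orientation, a minimal usage sketch of the API added in this commit; the textureCount, pixelsPerDisplayPixel and textureUnit values are illustrative only and not taken from StereoDemo01:

    import com.jogamp.opengl.util.stereo.StereoDevice;
    import com.jogamp.opengl.util.stereo.StereoDeviceFactory;
    import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;

    // Pick the generic implementation explicitly; createDefaultFactory()
    // prefers the OculusVR factory when the oculusvr-sdk sub-module is available.
    final StereoDeviceFactory factory =
            StereoDeviceFactory.createFactory(StereoDeviceFactory.DeviceType.Generic);

    // deviceIndex selects a built-in configuration:
    //   0: monoscopic, no post-processing
    //   1: stereoscopic SBS, no post-processing
    //   2: stereoscopic SBS + lenses (requires the oculusvr-sdk distortion-mesh producer)
    final StereoDevice device = factory.createDevice(1, null /* use built-in config */, true /* verbose */);

    // Create the renderer using the device defaults.
    final StereoDeviceRenderer renderer =
            device.createRenderer(device.getRecommendedDistortionBits(),
                                  2 /* textureCount */,
                                  device.getDefaultEyePositionOffset(),
                                  device.getDefaultFOV(),
                                  1f /* pixelsPerDisplayPixel */,
                                  0 /* textureUnit */);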
Diffstat (limited to 'src')
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/math/FloatUtil.java  4
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/math/FovHVHalves.java  71
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/math/VectorUtil.java  64
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/PMVMatrix.java  2
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java  40
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java  88
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java  33
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java  37
-rw-r--r--  src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java  2
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java  95
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java  457
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceFactory.java  43
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceRenderer.java  605
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/ScaleAndOffset2D.java  107
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp  26
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp  33
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp  22
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp  27
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp  44
-rw-r--r--  src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp  65
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java  64
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java  3
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java  134
-rw-r--r--  src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java  19
-rw-r--r--  src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java  81
25 files changed, 2010 insertions, 156 deletions
diff --git a/src/jogl/classes/com/jogamp/opengl/math/FloatUtil.java b/src/jogl/classes/com/jogamp/opengl/math/FloatUtil.java
index d2e535eaf..3a3568697 100644
--- a/src/jogl/classes/com/jogamp/opengl/math/FloatUtil.java
+++ b/src/jogl/classes/com/jogamp/opengl/math/FloatUtil.java
@@ -559,7 +559,7 @@ public final class FloatUtil {
* @param initM if true, given matrix will be initialized w/ identity matrix,
* otherwise only the frustum fields are set.
* @param fovy_rad angle in radians
- * @param aspect
+ * @param aspect aspect ratio width / height
* @param zNear
* @param zFar
* @return given matrix for chaining
@@ -591,7 +591,7 @@ public final class FloatUtil {
*/
public static float[] makePerspective(final float[] m, final int m_offset, final boolean initM,
final FovHVHalves fovhv, final float zNear, final float zFar) {
- final FovHVHalves fovhvTan = fovhv.getInTangents(); // use tangent of half-fov !
+ final FovHVHalves fovhvTan = fovhv.toTangents(); // use tangent of half-fov !
final float top = fovhvTan.top * zNear;
final float bottom = -1.0f * fovhvTan.bottom * zNear;
final float left = -1.0f * fovhvTan.left * zNear;
diff --git a/src/jogl/classes/com/jogamp/opengl/math/FovHVHalves.java b/src/jogl/classes/com/jogamp/opengl/math/FovHVHalves.java
index 786d146e6..26ed57009 100644
--- a/src/jogl/classes/com/jogamp/opengl/math/FovHVHalves.java
+++ b/src/jogl/classes/com/jogamp/opengl/math/FovHVHalves.java
@@ -69,26 +69,87 @@ public final class FovHVHalves {
/**
* Returns a symmetrical centered {@link FovHVHalves} instance in tangents, using:
* <pre>
- final float halfHorizFovTan = (float)Math.tan(horizontalFov/2f);
- final float halfVertFovTan = (float)Math.tan(verticalFov/2f);
+ halfHorizFovTan = tan( horizontalFov / 2f );
+ halfVertFovTan = tan( verticalFov / 2f );
* </pre>
* @param horizontalFov whole horizontal FOV in radians
* @param verticalFov whole vertical FOV in radians
*/
- public static FovHVHalves createByRadians(final float horizontalFov, final float verticalFov) {
+ public static FovHVHalves byRadians(final float horizontalFov, final float verticalFov) {
final float halfHorizFovTan = FloatUtil.tan(horizontalFov/2f);
final float halfVertFovTan = FloatUtil.tan(verticalFov/2f);
return new FovHVHalves(halfHorizFovTan, halfHorizFovTan, halfVertFovTan, halfVertFovTan, true);
}
/**
- * Returns this instance values <i>in tangent</i> values.
+ * Returns a symmetrical centered {@link FovHVHalves} instance in tangents, using:
+ * <pre>
+ top = bottom = tan( verticalFov / 2f );
+ left = right = aspect * top;
+ * </pre>
+ *
+ * @param verticalFov vertical FOV in radians
+ * @param aspect aspect ratio width / height
+ */
+ public static FovHVHalves byFovyRadianAndAspect(final float verticalFov, final float aspect) {
+ final float halfVertFovTan = FloatUtil.tan(verticalFov/2f);
+ final float halfHorizFovTan = aspect * halfVertFovTan;
+ return new FovHVHalves(halfHorizFovTan, halfHorizFovTan,
+ halfVertFovTan, halfVertFovTan, true);
+ }
+
+ /**
+ * Returns a custom symmetry {@link FovHVHalves} instance in tangents, using:
+ * <pre>
+ left = tan( horizontalFov * horizCenterFromLeft )
+ right = tan( horizontalFov * ( 1f - horizCenterFromLeft ) )
+ top = tan( verticalFov * vertCenterFromTop )
+ bottom = tan( verticalFov * (1f - vertCenterFromTop ) )
+ * </pre>
+ * @param horizontalFov whole horizontal FOV in radians
+ * @param horizCenterFromLeft horizontal center from left in [0..1]
+ * @param verticalFov whole vertical FOV in radians
+ * @param vertCenterFromTop vertical center from top in [0..1]
+ */
+ public static FovHVHalves byRadians(final float horizontalFov, final float horizCenterFromLeft,
+ final float verticalFov, final float vertCenterFromTop) {
+ return new FovHVHalves(FloatUtil.tan(horizontalFov * horizCenterFromLeft),
+ FloatUtil.tan(horizontalFov * ( 1f - horizCenterFromLeft )),
+ FloatUtil.tan(verticalFov * vertCenterFromTop),
+ FloatUtil.tan(verticalFov * (1f - vertCenterFromTop )),
+ true);
+ }
+
+ /**
+ * Returns a custom symmetry {@link FovHVHalves} instance in tangents,
+ * via computing the <code>horizontalFov</code> using:
+ * <pre>
+ halfVertFovTan = tan( verticalFov / 2f );
+ halfHorizFovTan = aspect * halfVertFovTan;
+ horizontalFov = atan( halfHorizFovTan ) * 2f;
+ return {@link #byRadians(float, float, float, float) byRadians}(horizontalFov, horizCenterFromLeft, verticalFov, vertCenterFromTop)
+ * </pre>
+ * @param verticalFov whole vertical FOV in radians
+ * @param vertCenterFromTop vertical center from top in [0..1]
+ * @param aspect aspect ratio width / height
+ * @param horizCenterFromLeft horizontal center from left in [0..1]
+ */
+ public static FovHVHalves byFovyRadianAndAspect(final float verticalFov, final float vertCenterFromTop,
+ final float aspect, final float horizCenterFromLeft) {
+ final float halfVertFovTan = FloatUtil.tan(verticalFov/2f);
+ final float halfHorizFovTan = aspect * halfVertFovTan;
+ final float horizontalFov = FloatUtil.atan(halfHorizFovTan) * 2f;
+ return byRadians(horizontalFov, horizCenterFromLeft, verticalFov, vertCenterFromTop);
+ }
+
+ /**
+ * Returns this instance <i>in tangent</i> values.
* <p>
* If this instance is {@link #inTangents} already, method returns this instance,
* otherwise a newly created instance w/ converted values to tangent.
* </p>
*/
- public final FovHVHalves getInTangents() {
+ public final FovHVHalves toTangents() {
if( inTangents ) {
return this;
} else {
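A short sketch of the renamed FovHVHalves entry points together with the FloatUtil.makePerspective(..) overload touched above; the near/far values are arbitrary:

    import com.jogamp.opengl.math.FloatUtil;
    import com.jogamp.opengl.math.FovHVHalves;

    final float d2r = FloatUtil.PI / 180f;
    // Symmetric 45 degree vertical FOV for a 1280x800 surface,
    // stored as tangents of the half angles.
    final FovHVHalves fov = FovHVHalves.byFovyRadianAndAspect(45f * d2r, 1280f / 800f);

    // makePerspective(..) calls toTangents() internally, which is a no-op here
    // since the factory method already returns tangent values.
    final float[] p = FloatUtil.makePerspective(new float[16], 0, true, fov,
                                                0.1f /* zNear */, 100f /* zFar */);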
diff --git a/src/jogl/classes/com/jogamp/opengl/math/VectorUtil.java b/src/jogl/classes/com/jogamp/opengl/math/VectorUtil.java
index c11c2bd2b..36222cf4a 100644
--- a/src/jogl/classes/com/jogamp/opengl/math/VectorUtil.java
+++ b/src/jogl/classes/com/jogamp/opengl/math/VectorUtil.java
@@ -393,7 +393,7 @@ public final class VectorUtil {
}
/**
- * Scales a vector by param using given result float[]
+ * Scales a vector by param using given result float[], result = vector * scale
* @param result vector for the result, may be vector (in-place)
* @param vector input vector
* @param scale single scale constant for all vector components
@@ -406,7 +406,7 @@ public final class VectorUtil {
}
/**
- * Scales a vector by param using given result float[]
+ * Scales a vector by param using given result float[], result = vector * scale
* @param result vector for the result, may be vector (in-place)
* @param vector input vector
* @param scale single scale constant for all vector components
@@ -420,7 +420,7 @@ public final class VectorUtil {
}
/**
- * Scales a vector by param using given result float[]
+ * Scales a vector by param using given result float[], result = vector * scale
* @param result vector for the result, may be vector (in-place)
* @param vector input vector
* @param scale 3 component scale constant for each vector component
@@ -435,7 +435,7 @@ public final class VectorUtil {
}
/**
- * Scales a vector by param using given result float[]
+ * Scales a vector by param using given result float[], result = vector * scale
* @param result vector for the result, may be vector (in-place)
* @param vector input vector
* @param scale 2 component scale constant for each vector component
@@ -449,6 +449,62 @@ public final class VectorUtil {
}
/**
+ * Divides a vector by param using given result float[], result = vector / scale
+ * @param result vector for the result, may be vector (in-place)
+ * @param vector input vector
+ * @param scale single scale constant for all vector components
+ * @return result vector for chaining
+ */
+ public static float[] divVec2(final float[] result, final float[] vector, final float scale) {
+ result[0] = vector[0] / scale;
+ result[1] = vector[1] / scale;
+ return result;
+ }
+
+ /**
+ * Divides a vector by param using given result float[], result = vector / scale
+ * @param result vector for the result, may be vector (in-place)
+ * @param vector input vector
+ * @param scale single scale constant for all vector components
+ * @return result vector for chaining
+ */
+ public static float[] divVec3(final float[] result, final float[] vector, final float scale) {
+ result[0] = vector[0] / scale;
+ result[1] = vector[1] / scale;
+ result[2] = vector[2] / scale;
+ return result;
+ }
+
+ /**
+ * Divides a vector by param using given result float[], result = vector / scale
+ * @param result vector for the result, may be vector (in-place)
+ * @param vector input vector
+ * @param scale 3 component scale constant for each vector component
+ * @return result vector for chaining
+ */
+ public static float[] divVec3(final float[] result, final float[] vector, final float[] scale)
+ {
+ result[0] = vector[0] / scale[0];
+ result[1] = vector[1] / scale[1];
+ result[2] = vector[2] / scale[2];
+ return result;
+ }
+
+ /**
+ * Divides a vector by param using given result float[], result = vector / scale
+ * @param result vector for the result, may be vector (in-place)
+ * @param vector input vector
+ * @param scale 2 component scale constant for each vector component
+ * @return result vector for chaining
+ */
+ public static float[] divVec2(final float[] result, final float[] vector, final float[] scale)
+ {
+ result[0] = vector[0] / scale[0];
+ result[1] = vector[1] / scale[1];
+ return result;
+ }
+
+ /**
* Adds two vectors, result = v1 + v2
* @param result float[2] result vector, may be either v1 or v2 (in-place)
* @param v1 vector 1
diff --git a/src/jogl/classes/com/jogamp/opengl/util/PMVMatrix.java b/src/jogl/classes/com/jogamp/opengl/util/PMVMatrix.java
index 289183b8e..11acb0c58 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/PMVMatrix.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/PMVMatrix.java
@@ -673,7 +673,7 @@ public final class PMVMatrix implements GLMatrixFunc {
* {@link #glMultMatrixf(FloatBuffer) Multiply} the {@link #glGetMatrixMode() current matrix} with the perspective/frustum matrix.
*
* @param fovy_deg fov angle in degrees
- * @param aspect aspect ratio
+ * @param aspect aspect ratio width / height
* @param zNear
* @param zFar
*/
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java
index 9f9ebdf2a..dfb676456 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoClientRenderer.java
@@ -58,7 +58,6 @@ public class StereoClientRenderer implements GLEventListener {
private final FBObject[] fbos;
private final int magFilter;
private final int minFilter;
- private final boolean usePostprocessing;
private int numSamples;
private final TextureAttachment[] fboTexs;
@@ -71,7 +70,6 @@ public class StereoClientRenderer implements GLEventListener {
}
this.helper = new GLDrawableHelper();
this.deviceRenderer = deviceRenderer;
- this.usePostprocessing = deviceRenderer.ppRequired() || deviceRenderer.usesSideBySideStereo() && fboCount > 1;
this.ownsDevice = ownsDevice;
this.magFilter = magFilter;
this.minFilter = minFilter;
@@ -179,26 +177,31 @@ public class StereoClientRenderer implements GLEventListener {
final int fboCount = fbos.length;
final int displayRepeatFlags;
- if( 1 == fboCount ) {
+ if( 1 >= fboCount ) {
displayRepeatFlags = CustomGLEventListener.DISPLAY_DONTCLEAR;
} else {
displayRepeatFlags = 0;
}
+ final int[] eyeOrder = deviceRenderer.getDevice().getEyeRenderOrder();
+ final int eyeCount = eyeOrder.length;
+
// Update eye pos upfront to have same (almost) results
- deviceRenderer.updateEyePose(0);
- deviceRenderer.updateEyePose(1);
+ for(int eyeNum=0; eyeNum<eyeCount; eyeNum++) {
+ deviceRenderer.updateEyePose(eyeNum);
+ }
if( 1 == fboCount ) {
fbos[0].bind(gl);
}
- for(int eyeNum=0; eyeNum<2; eyeNum++) {
+ for(int eyeNum=0; eyeNum<eyeCount; eyeNum++) {
+ final int eyeName = eyeOrder[eyeNum];
if( 1 < fboCount ) {
- fbos[eyeNum].bind(gl);
+ fbos[eyeName].bind(gl);
}
- final StereoDeviceRenderer.Eye eye = deviceRenderer.getEye(eyeNum);
+ final StereoDeviceRenderer.Eye eye = deviceRenderer.getEye(eyeName);
final RectangleImmutable viewport = eye.getViewport();
gl.glViewport(viewport.getX(), viewport.getY(), viewport.getWidth(), viewport.getHeight());
@@ -213,28 +216,31 @@ public class StereoClientRenderer implements GLEventListener {
helper.runForAllGLEventListener(drawable, reshapeDisplayAction);
if( 1 < fboCount ) {
- fbos[eyeNum].unbind(gl);
+ fbos[eyeName].unbind(gl);
}
}
+
if( 1 == fboCount ) {
fbos[0].unbind(gl);
}
// restore viewport
gl.glViewport(0, 0, drawable.getSurfaceWidth(), drawable.getSurfaceHeight());
- if( usePostprocessing ) {
+ if( deviceRenderer.ppAvailable() ) {
deviceRenderer.ppBegin(gl);
if( 1 == fboCount ) {
fbos[0].use(gl, fboTexs[0]);
- deviceRenderer.ppBothEyes(gl);
+ for(int eyeNum=0; eyeNum<eyeCount; eyeNum++) {
+ deviceRenderer.ppOneEye(gl, eyeOrder[eyeNum]);
+ }
fbos[0].unuse(gl);
} else {
- fbos[0].use(gl, fboTexs[0]);
- deviceRenderer.ppOneEye(gl, 0);
- fbos[0].unuse(gl);
- fbos[1].use(gl, fboTexs[1]);
- deviceRenderer.ppOneEye(gl, 1);
- fbos[1].unuse(gl);
+ for(int eyeNum=0; eyeNum<eyeCount; eyeNum++) {
+ final int eyeName = eyeOrder[eyeNum];
+ fbos[eyeName].use(gl, fboTexs[eyeName]);
+ deviceRenderer.ppOneEye(gl, eyeName);
+ fbos[eyeName].unuse(gl);
+ }
}
deviceRenderer.ppEnd(gl);
}
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java
index e5c0e3646..2091d0843 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDevice.java
@@ -39,14 +39,12 @@ import com.jogamp.opengl.math.FovHVHalves;
*/
public interface StereoDevice {
public static final boolean DEBUG = Debug.debug("StereoDevice");
+ public static final boolean DUMP_DATA = Debug.isPropertyDefined("jogl.debug.StereoDevice.DumpData", true);
- /**
- * Default eye position offset for {@link #createRenderer(int, int, float[], FovHVHalves[], float)}.
- * <p>
- * Default offset is 1.6f <i>up</i> and 5.0f <i>away</i>.
- * </p>
- */
- public static final float[] DEFAULT_EYE_POSITION_OFFSET = { 0.0f, 1.6f, -5.0f };
+ /** Merely a class providing a type-tag for extensions */
+ public static class Config {
+ // NOP
+ }
/** Disposes this {@link StereoDevice}. */
public void dispose();
@@ -66,7 +64,25 @@ public interface StereoDevice {
public DimensionImmutable getSurfaceSize();
/**
- * Returns the device default {@link FovHVHalves} per eye.
+ * Return the device default eye position offset for {@link #createRenderer(int, int, float[], FovHVHalves[], float)}.
+ * <p>
+ * Result is an array of float values for
+ * <ul>
+ * <li><i>right</i> (positive)</li>
+ * <li><i>up</i> (positive)</li>
+ * <li><i>forward</i> (negative)</li>
+ * </ul>
+ * </p>
+ * @return
+ */
+ public float[] getDefaultEyePositionOffset();
+
+ /**
+ * Returns the device default {@link FovHVHalves} for all supported eyes
+ * in natural order, i.e. left and right if supported.
+ * <p>
+ * Monoscopic devices return an array of length one, without a value for the right eye!
+ * </p>
*/
public FovHVHalves[] getDefaultFOV();
@@ -77,12 +93,60 @@ public interface StereoDevice {
public boolean getSensorsStarted();
/**
+ * Returns an array of the preferred eye rendering order.
+ * The array length reflects the supported eye count.
+ * <p>
+ * Monoscopic devices only support one eye, whereas stereoscopic devices support two eyes.
+ * </p>
+ */
+ public int[] getEyeRenderOrder();
+
+ /**
+ * Returns the supported distortion compensation by the {@link StereoDeviceRenderer},
+ * e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, {@link StereoDeviceRenderer#DISTORTION_CHROMATIC}, etc.
+ * @see StereoDeviceRenderer#getDistortionBits()
+ * @see #createRenderer(int, int, float[], FovHVHalves[], float, int)
+ * @see #getRecommendedDistortionBits()
+ * @see #getMinimumDistortionBits()
+ */
+ public int getSupportedDistortionBits();
+
+ /**
+ * Returns the recommended distortion compensation bits for the {@link StereoDeviceRenderer},
+ * e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, {@link StereoDeviceRenderer#DISTORTION_CHROMATIC}
+ * {@link StereoDeviceRenderer#DISTORTION_VIGNETTE}.
+ * <p>
+ * User shall use the recommended distortion compensation to achieve a distortion-free view.
+ * </p>
+ * @see StereoDeviceRenderer#getDistortionBits()
+ * @see #createRenderer(int, int, float[], FovHVHalves[], float, int)
+ * @see #getSupportedDistortionBits()
+ * @see #getMinimumDistortionBits()
+ */
+ public int getRecommendedDistortionBits();
+
+ /**
+ * Returns the minimum distortion compensation bits as required by the {@link StereoDeviceRenderer},
+ * e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL} in case the stereoscopic display uses [a]spherical lenses.
+ * <p>
+ * Minimum distortion compensation bits are being enforced by the {@link StereoDeviceRenderer}.
+ * </p>
+ * @see #getSupportedDistortionBits()
+ * @see #getRecommendedDistortionBits()
+ * @see StereoDeviceRenderer#getDistortionBits()
+ * @see #createRenderer(int, int, float[], FovHVHalves[], float, int)
+ */
+ public int getMinimumDistortionBits();
+
+ /**
* Create a new {@link StereoDeviceRenderer} instance.
*
- * @param distortionBits {@link StereoDeviceRenderer} distortion bits, e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, etc.
- * @param textureCount desired texture count for post-processing, see {@link StereoDeviceRenderer#getTextureCount()} and {@link StereoDeviceRenderer#ppRequired()}
- * @param eyePositionOffset eye position offset, e.g. {@link #DEFAULT_EYE_POSITION_OFFSET}.
- * @param eyeFov FovHVHalves[2] field-of-view per eye
+ * @param distortionBits {@link StereoDeviceRenderer} distortion bits, e.g. {@link StereoDeviceRenderer#DISTORTION_BARREL}, etc,
+ * see {@link #getRecommendedDistortionBits()}.
+ * @param textureCount desired texture count for post-processing, see {@link StereoDeviceRenderer#getTextureCount()} and {@link StereoDeviceRenderer#ppAvailable()}
+ * @param eyePositionOffset eye position offset, e.g. {@link #getDefaultEyePositionOffset()}.
+ * @param eyeFov FovHVHalves[] field-of-view per eye, e.g. {@link #getDefaultFOV()}. May contain only one value for monoscopic devices,
+ * see {@link #getEyeRenderOrder()}.
* @param pixelsPerDisplayPixel
* @param textureUnit
* @return
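The supported/recommended/minimum distortion-bit split above suggests the following selection when calling createRenderer(..); this clamping is only a sketch, the actual enforcement of the minimum bits happens inside the StereoDeviceRenderer:

    // Request barrel distortion, but honor the device's minimum bits and
    // never exceed what the device supports.
    final int requested = StereoDeviceRenderer.DISTORTION_BARREL;
    final int distortionBits = ( requested | device.getMinimumDistortionBits() )
                               & device.getSupportedDistortionBits();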
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java
index d9054ce28..46ce82f03 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceFactory.java
@@ -31,26 +31,43 @@ import com.jogamp.common.util.ReflectionUtil;
/**
* Platform agnostic {@link StereoDevice} factory.
+ * <p>
+ * To implement a new {@link StereoDevice}, the following interfaces/classes must be implemented:
+ * <ul>
+ * <li>{@link StereoDeviceFactory}</li>
+ * <li>{@link StereoDevice}</li>
+ * <li>{@link StereoDeviceRenderer}</li>
+ * </ul>
+ * </p>
*/
public abstract class StereoDeviceFactory {
private static final String OVRStereoDeviceClazzName = "jogamp.opengl.oculusvr.OVRStereoDeviceFactory";
- private static final Object[] ctorArgs;
+ private static final String GenericStereoDeviceClazzName = "jogamp.opengl.util.stereo.GenericStereoDeviceFactory";
private static final String isAvailableMethodName = "isAvailable";
- static {
- ctorArgs = new Object[6];
- ctorArgs[0] = null;
+ public static enum DeviceType { Default, Generic, OculusVR };
- }
public static StereoDeviceFactory createDefaultFactory() {
final ClassLoader cl = StereoDeviceFactory.class.getClassLoader();
- final StereoDeviceFactory sink = createFactory(cl, OVRStereoDeviceClazzName);
+ StereoDeviceFactory sink = createFactory(cl, OVRStereoDeviceClazzName);
if( null == sink ) {
- // sink = create(cl, ANYOTHERCLAZZNAME);
+ sink = createFactory(cl, GenericStereoDeviceClazzName);
}
return sink;
}
+ public static StereoDeviceFactory createFactory(final DeviceType type) {
+ final String className;
+ switch( type ) {
+ case Default: return createDefaultFactory();
+ case Generic: className = GenericStereoDeviceClazzName; break;
+ case OculusVR: className = OVRStereoDeviceClazzName; break;
+ default: throw new InternalError("XXX");
+ }
+ final ClassLoader cl = StereoDeviceFactory.class.getClassLoader();
+ return createFactory(cl, className);
+ }
+
public static StereoDeviceFactory createFactory(final ClassLoader cl, final String implName) {
try {
if(((Boolean)ReflectionUtil.callStaticMethod(implName, isAvailableMethodName, null, null, cl)).booleanValue()) {
@@ -60,5 +77,5 @@ public abstract class StereoDeviceFactory {
return null;
}
- public abstract StereoDevice createDevice(final int deviceIndex, final boolean verbose);
+ public abstract StereoDevice createDevice(final int deviceIndex, final StereoDevice.Config config, final boolean verbose);
}
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java
index fd94f6bc3..1805b71bc 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoDeviceRenderer.java
@@ -41,13 +41,13 @@ import javax.media.opengl.GL;
* <li>device.{@link #beginFrame(GL)}</li>
* <li>For both eyes:<ul>
* <li>device.{@link #updateEyePose(int)}</li>
- * <li>if device.{@link #ppRequired()}: Set the render target, e.g. FBO</li>
+ * <li>if device.{@link #ppAvailable()}: Set the render target, e.g. FBO</li>
* <li>Set the viewport using {@link Eye#getViewport()}</li>
* <li>{@link StereoGLEventListener#reshapeForEye(javax.media.opengl.GLAutoDrawable, int, int, int, int, EyeParameter, EyePose) upstream.reshapeEye(..)}</li>
* <li>{@link StereoGLEventListener#display(javax.media.opengl.GLAutoDrawable, int) upstream.display(..)}.</li>
* </ul></li>
* <li>Reset the viewport</li>
- * <li>If device.{@link #ppRequired()}:<ul>
+ * <li>If device.{@link #ppAvailable()}:<ul>
* <li>device.{@link #ppBegin(GL)}</li>
* <li>Use render target, e.g. FBO's texture</li>
* <li>device.{@link #ppBothEyes(GL)} or device.{@link #ppOneEye(GL, int)} for both eyes</li>
@@ -116,10 +116,10 @@ public interface StereoDeviceRenderer {
public EyePose updateEyePose(final int eyeNum);
/**
- * Returns distortion compensation bits, e.g. {@link #DISTORTION_BARREL},
+ * Returns used distortion compensation bits, e.g. {@link #DISTORTION_BARREL},
* in case the stereoscopic display requires such, i.e. in case lenses are utilized.
* <p>
- * Distortion requires {@link #ppRequired() post-processing}.
+ * Distortion requires {@link #ppAvailable() post-processing}.
* </p>
*/
public int getDistortionBits();
@@ -133,7 +133,7 @@ public interface StereoDeviceRenderer {
* </p>
* <p>
* Either the renderer presents the images <i>side-by-side</i> according to the {@link Eye#getViewport() eye's viewport},
- * or {@link #ppRequired() post-processing} is utilized to merge {@link #getTextureCount() textures}
+ * or {@link #ppAvailable() post-processing} is utilized to merge {@link #getTextureCount() textures}
* to a <i>side-by-side</i> configuration.
* </p>
*/
@@ -156,7 +156,7 @@ public interface StereoDeviceRenderer {
public DimensionImmutable getTotalSurfaceSize();
/**
- * Returns the used texture-image count for post-processing, see {@link #ppRequired()}.
+ * Returns the used texture-image count for post-processing, see {@link #ppAvailable()}.
* <p>
* In case the renderer does not support multiple textures for post-processing,
* or no post-processing at all, method returns zero despite the request
@@ -165,7 +165,7 @@ public interface StereoDeviceRenderer {
*/
public int getTextureCount();
- /** Returns the desired texture-image unit for post-processing, see {@link #ppRequired()}. */
+ /** Returns the desired texture-image unit for post-processing, see {@link #ppAvailable()}. */
public int getTextureUnit();
/** Initialize OpenGL related resources */
@@ -181,13 +181,12 @@ public interface StereoDeviceRenderer {
public void endFrame(final GL gl);
/**
- * Returns <code>true</code> if stereoscopic post-processing is required,
+ * Returns <code>true</code> if stereoscopic post-processing is required and available,
* otherwise <code>false</code>.
* <p>
- * Stereoscopic post-processing is usually required if:
+ * Stereoscopic post-processing is available if:
* <ul>
- * <li>one of the <i>distortion</i> modes are set, i.e. {@link #usesBarrelDistortion()}</li>
- * <li>texture-images are being used, see {@link #getTextureCount()}</li>
+ * <li>one of the <i>distortion</i> bits are set, see {@link #getDistortionBits()}</li>
* </ul>
* </p>
* <p>
@@ -195,15 +194,15 @@ public interface StereoDeviceRenderer {
* the following post-processing methods must be called before {@link #endFrame()}:
* <ul>
* <li>{@link #ppBegin(GL)}</li>
- * <li>{@link #ppBothEyes(GL)} or {@link #ppOneEye(GL, int)} for both eyes</li>
+ * <li>{@link #ppOneEye(GL, int)} for both eyes</li>
* <li>{@link #ppEnd(GL)}</li>
* </ul>
* </p>
*/
- public boolean ppRequired();
+ public boolean ppAvailable();
/**
- * Begin stereoscopic post-processing, see {@link #ppRequired()}.
+ * Begin stereoscopic post-processing, see {@link #ppAvailable()}.
* <p>
* {@link #updateEyePose(int)} for both eyes must be called upfront
* when rendering upstream {@link StereoGLEventListener}.
@@ -214,20 +213,14 @@ public interface StereoDeviceRenderer {
public void ppBegin(final GL gl);
/**
- * Performs stereoscopic post-processing for both eyes, see {@link #ppRequired()}.
- * @param gl
- */
- public void ppBothEyes(final GL gl);
-
- /**
- * Performs stereoscopic post-processing for one eye, see {@link #ppRequired()}.
+ * Performs stereoscopic post-processing for one eye, see {@link #ppAvailable()}.
* @param gl
* @param eyeNum
*/
public void ppOneEye(final GL gl, final int eyeNum);
/**
- * End stereoscopic post-processing, see {@link #ppRequired()}.
+ * End stereoscopic post-processing, see {@link #ppAvailable()}.
* @param gl
*/
public void ppEnd(final GL gl);
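Put together, the per-frame sequence documented above reads roughly as follows; FBO handling and the upstream StereoGLEventListener calls are elided, and gl/renderer are assumed to be in scope:

    renderer.beginFrame(gl);
    // Update eye poses upfront for all eyes in the device's render order.
    for(final int eyeName : renderer.getDevice().getEyeRenderOrder()) {
        renderer.updateEyePose(eyeName);
    }
    for(final int eyeName : renderer.getDevice().getEyeRenderOrder()) {
        // if renderer.ppAvailable(): bind this eye's render target (FBO) here
        final StereoDeviceRenderer.Eye eye = renderer.getEye(eyeName);
        final RectangleImmutable vp = eye.getViewport();
        gl.glViewport(vp.getX(), vp.getY(), vp.getWidth(), vp.getHeight());
        // upstream.reshapeForEye(..) and upstream.display(..)
    }
    if( renderer.ppAvailable() ) {
        renderer.ppBegin(gl);
        for(final int eyeName : renderer.getDevice().getEyeRenderOrder()) {
            // use this eye's texture, then:
            renderer.ppOneEye(gl, eyeName);
        }
        renderer.ppEnd(gl);
    }
    renderer.endFrame(gl);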
diff --git a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java
index 280d99233..3031013b8 100644
--- a/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java
+++ b/src/jogl/classes/com/jogamp/opengl/util/stereo/StereoUtil.java
@@ -50,7 +50,7 @@ public class StereoUtil {
final StringBuilder sb = new StringBuilder();
if( usesBarrelDistortion(distortionBits) ) {
if( appendComma ) { sb.append(", "); };
- sb.append("barrell"); appendComma=true;
+ sb.append("barrel"); appendComma=true;
}
if( usesVignetteDistortion(distortionBits) ) {
if( appendComma ) { sb.append(", "); };
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java b/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java
new file mode 100644
index 000000000..7a2483121
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/DistortionMesh.java
@@ -0,0 +1,95 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import com.jogamp.opengl.util.stereo.EyeParameter;
+
+public class DistortionMesh {
+ public static interface Producer {
+ /** Initialize */
+ void init(final GenericStereoDevice.Config deviceConfig, final float[] eyeReliefInMeters);
+
+ /** Distortion Mesh Producer */
+ DistortionMesh create(final EyeParameter eyeParam, final int distortionBits);
+ }
+ public static class DistortionVertex {
+ /** {@value} */
+ public static final int def_pos_size = 2;
+ /** {@value} */
+ public static final int def_vignetteFactor_size = 1;
+ /** {@value} */
+ public static final int def_timewarpFactor_size = 1;
+ /** {@value} */
+ public static final int def_texR_size = 2;
+ /** {@value} */
+ public static final int def_texG_size = 2;
+ /** {@value} */
+ public static final int def_texB_size = 2;
+
+ /** {@value} */
+ public static final int def_total_size = def_pos_size + def_vignetteFactor_size + def_timewarpFactor_size +
+ def_texR_size + def_texG_size + def_texB_size;
+
+ public DistortionVertex(final float[] data, final int pos_size,
+ final int vignetteFactor_size, final int timewarpFactor_size, final int texR_size,
+ final int texG_size, final int texB_size) {
+ this.data = data;
+ this.pos_size = pos_size;
+ this.vignetteFactor_size = vignetteFactor_size;
+ this.timewarpFactor_size = timewarpFactor_size;
+ this.texR_size = texR_size;
+ this.texG_size = texG_size;
+ this.texB_size = texB_size;
+ }
+ final float[] data;
+
+ /** Usually {@link #def_pos_size} */
+ final int pos_size;
+ /** Usually {@link #def_vignetteFactor_size} */
+ final int vignetteFactor_size;
+ /** Usually {@link #def_timewarpFactor_size} */
+ final int timewarpFactor_size;
+ /** Usually {@link #def_texR_size} */
+ final int texR_size;
+ /** Usually {@link #def_texG_size} */
+ final int texG_size;
+ /** Usually {@link #def_texB_size} */
+ final int texB_size;
+ }
+ public DistortionMesh(final DistortionMesh.DistortionVertex[] vertices, final int vertexCount,
+ final short[] indices, final int indexCount) {
+ this.vertices = vertices;
+ this.vertexCount = vertexCount;
+ this.indices = indices;
+ this.indexCount = indexCount;
+ }
+ final DistortionMesh.DistortionVertex[] vertices;
+ final int vertexCount;
+ final short[] indices;
+ final int indexCount;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java
new file mode 100644
index 000000000..d7fb95d91
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDevice.java
@@ -0,0 +1,457 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import java.util.Arrays;
+
+import javax.media.nativewindow.util.Dimension;
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.Point;
+import javax.media.nativewindow.util.PointImmutable;
+import javax.media.nativewindow.util.Rectangle;
+import javax.media.nativewindow.util.RectangleImmutable;
+
+import com.jogamp.common.util.ReflectionUtil;
+import com.jogamp.opengl.math.FloatUtil;
+import com.jogamp.opengl.math.FovHVHalves;
+import com.jogamp.opengl.util.stereo.EyeParameter;
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
+import com.jogamp.opengl.util.stereo.StereoUtil;
+
+public class GenericStereoDevice implements StereoDevice {
+ public static enum ShutterType {
+ Global, RollingLeftToRight, RollingRightToLeft, RollingTopToBottom
+ }
+ public static class Config extends StereoDevice.Config {
+ public Config(final String name,
+ final ShutterType shutterType,
+ final DimensionImmutable surfaceSizeInPixels,
+ final float[] screenSizeInMeters,
+ final DimensionImmutable eyeTextureSize,
+ final float pupilCenterFromScreenTopInMeters,
+ final float interpupillaryDistanceInMeters,
+ final int[] eyeRenderOrder,
+ final EyeParameter[] defaultEyeParam,
+ final DistortionMesh.Producer distortionMeshProducer,
+ final int supportedDistortionBits,
+ final int recommendedDistortionBits,
+ final int minimumDistortionBits
+ ) {
+ this.name = name;
+ this.shutterType = shutterType;
+ this.surfaceSizeInPixels = surfaceSizeInPixels;
+ this.screenSizeInMeters = screenSizeInMeters;
+ this.eyeTextureSize = eyeTextureSize;
+ this.pupilCenterFromScreenTopInMeters = pupilCenterFromScreenTopInMeters;
+ this.interpupillaryDistanceInMeters = interpupillaryDistanceInMeters;
+ this.eyeRenderOrder = eyeRenderOrder;
+ this.defaultEyeParam = defaultEyeParam;
+ this.distortionMeshProducer = distortionMeshProducer;
+ this.supportedDistortionBits = supportedDistortionBits;
+ this.recommendedDistortionBits = recommendedDistortionBits;
+ this.minimumDistortionBits = minimumDistortionBits;
+ this.pupilCenterFromTopLeft = new float[2][2];
+ calcPupilCenterFromTopLeft();
+ }
+ /** A variation w/ different surface/screen specs */
+ public Config(final Config source,
+ final DimensionImmutable surfaceSizeInPixels,
+ final float[] screenSizeInMeters,
+ final DimensionImmutable eyeTextureSize) {
+ this.name = source.name;
+ this.shutterType = source.shutterType;
+ this.surfaceSizeInPixels = surfaceSizeInPixels;
+ this.screenSizeInMeters = screenSizeInMeters;
+ this.eyeTextureSize = eyeTextureSize;
+ this.pupilCenterFromScreenTopInMeters = source.pupilCenterFromScreenTopInMeters;
+ this.interpupillaryDistanceInMeters = source.interpupillaryDistanceInMeters;
+ this.eyeRenderOrder = source.eyeRenderOrder;
+ this.defaultEyeParam = source.defaultEyeParam;
+ this.distortionMeshProducer = source.distortionMeshProducer;
+ this.supportedDistortionBits = source.supportedDistortionBits;
+ this.recommendedDistortionBits = source.recommendedDistortionBits;
+ this.minimumDistortionBits = source.minimumDistortionBits;
+ this.pupilCenterFromTopLeft = new float[2][2];
+ calcPupilCenterFromTopLeft();
+ }
+ private void calcPupilCenterFromTopLeft() {
+ final float visibleWidthOfOneEye = 0.5f * screenSizeInMeters[0];
+ final float leftPupilCenterFromLeftInMeters = ( screenSizeInMeters[0] - interpupillaryDistanceInMeters ) * 0.5f;
+ final float rightPupilCenterFromMiddleInMeters = leftPupilCenterFromLeftInMeters + interpupillaryDistanceInMeters - visibleWidthOfOneEye;
+ pupilCenterFromTopLeft[0][0] = leftPupilCenterFromLeftInMeters / visibleWidthOfOneEye;
+ pupilCenterFromTopLeft[0][1] = pupilCenterFromScreenTopInMeters / screenSizeInMeters[1];
+ pupilCenterFromTopLeft[1][0] = rightPupilCenterFromMiddleInMeters / visibleWidthOfOneEye;
+ pupilCenterFromTopLeft[1][1] = pupilCenterFromTopLeft[0][1];
+ }
+
+ /**
+ * Return the vertical pupil center from the screen top in the range [0..1].
+ * @param screenHeightInMeters
+ * @param pupilCenterFromScreenTopInMeters
+ */
+ public static float getVertPupilCenterFromTop(final float screenHeightInMeters, final float pupilCenterFromScreenTopInMeters) {
+ return pupilCenterFromScreenTopInMeters / screenHeightInMeters;
+ }
+
+ /**
+ * Return the horizontal pupil center from the left side for both eyes in the range [0..1].
+ * <pre>
+ <-------------left eye------------->| |<-----------right eye-------------->
+ <------------------------------------screenSizeInMeters.Width----------------------------------->
+ <------interpupillaryDistanceInMeters------>
+ <--centerFromLeftInMeters->
+ ^
+ center of pupil
+ * </pre>
+ *
+ * @param screenWidthInMeters
+ * @param interpupillaryDistanceInMeters
+ */
+ public static float[] getHorizPupilCenterFromLeft(final float screenWidthInMeters, final float interpupillaryDistanceInMeters) {
+ final float visibleWidthOfOneEye = 0.5f * screenWidthInMeters;
+ final float leftPupilCenterFromLeftInMeters = ( screenWidthInMeters - interpupillaryDistanceInMeters ) * 0.5f;
+ final float rightPupilCenterFromMiddleInMeters = leftPupilCenterFromLeftInMeters + interpupillaryDistanceInMeters - visibleWidthOfOneEye;
+ return new float[] { leftPupilCenterFromLeftInMeters / visibleWidthOfOneEye,
+ rightPupilCenterFromMiddleInMeters / visibleWidthOfOneEye };
+ }
+
+ private void init() {
+ final float[] eyeReliefInMeters = new float[defaultEyeParam.length];
+ if( 0 < defaultEyeParam.length ) {
+ eyeReliefInMeters[0] = defaultEyeParam[0].eyeReliefZ;
+ }
+ if( 1 < defaultEyeParam.length ) {
+ eyeReliefInMeters[1] = defaultEyeParam[1].eyeReliefZ;
+ }
+ if( null != distortionMeshProducer ) {
+ distortionMeshProducer.init(this, eyeReliefInMeters);
+ }
+ }
+
+ @Override
+ public String toString() { return "StereoConfig["+name+", shutter "+shutterType+", surfaceSize "+surfaceSizeInPixels+
+ ", screenSize "+screenSizeInMeters[0]+" x "+screenSizeInMeters[0]+
+ " [m], eyeTexSize "+eyeTextureSize+", IPD "+interpupillaryDistanceInMeters+
+ " [m], eyeParam "+Arrays.toString(defaultEyeParam)+
+ ", distortionBits[supported ["+StereoUtil.distortionBitsToString(supportedDistortionBits)+
+ "], recommended ["+StereoUtil.distortionBitsToString(recommendedDistortionBits)+
+ "], minimum ["+StereoUtil.distortionBitsToString(minimumDistortionBits)+"]]]";
+ }
+
+ /** Configuration Name */
+ public final String name;
+ public final ShutterType shutterType;
+
+ public final DimensionImmutable surfaceSizeInPixels;
+ public final float[] screenSizeInMeters;
+ /** Texture size per eye */
+ public final DimensionImmutable eyeTextureSize;
+
+ /** Vertical distance from pupil to screen-top in meters */
+ public final float pupilCenterFromScreenTopInMeters;
+ /** Horizontal interpupillary distance (IPD) in meters */
+ public final float interpupillaryDistanceInMeters;
+ /**
+ * Pupil center from top left per eye, ranging from [0..1], may be used to produce FovHVHalves,
+ * see {@link #getHorizPupilCenterFromLeft(float, float)} and {@link #getVertPupilCenterFromTop(float, float)}.
+ */
+ public final float[/*per-eye*/][/*xy*/] pupilCenterFromTopLeft;
+ public final int[] eyeRenderOrder;
+ public final EyeParameter[] defaultEyeParam;
+ public final DistortionMesh.Producer distortionMeshProducer;
+
+ public final int supportedDistortionBits;
+ public final int recommendedDistortionBits;
+ public final int minimumDistortionBits;
+ }
+
+ /** A mono view configuration, only one eye is supported */
+ public static final Config config01Mono01;
+
+ /** A default stereo SBS view configuration */
+ public static final Config config02StereoSBS01;
+
+ /** A default stereo SBS lens view configuration, using settings similar to the OculusVR DK1 */
+ public static final Config config03StereoSBSLense01;
+
+ private static final Config[] configs;
+
+ static {
+ final float[] DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES = { 0.0f, 1.6f, -5.0f }; // 1.6 up, 5 forward
+ final float[] DEFAULT_EYE_POSITION_OFFSET_STEREO = { 0.0f, 0.3f, 3.0f }; // 0.3 up, 3 back
+ final float[] DEFAULT_EYE_POSITION_OFFSET_MONO = { 0.0f, 0.0f, 3.0f }; // 3 back
+
+ final float d2r = FloatUtil.PI / 180.0f;
+ {
+ config01Mono01 = new Config(
+ "Def01Mono01",
+ ShutterType.RollingTopToBottom,
+ new Dimension(1280, 800), // resolution
+ new float[] { 0.1498f, 0.0936f }, // screenSize [m]
+ new Dimension(1280, 800), // eye textureSize
+ 0.0936f/2f, // pupilCenterFromScreenTop [m]
+ 0.0635f, // IPD [m]
+ new int[] { 0 }, // eye order
+ new EyeParameter[] {
+ new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_MONO,
+ // half-FOV tangents: top/bottom = tan(45/2 deg), left/right = aspect * tan(45/2 deg)
+ FovHVHalves.byFovyRadianAndAspect(45f*d2r, 1280f / 800f),
+ 0f /* distNoseToPupil */, 0f /* verticalDelta */, 0f /* eyeReliefInMeters */) },
+ null, // distortion mesh producer
+ 0, // supported distortion bits
+ 0, // recommended distortion bits
+ 0 // minimum distortion bits
+ );
+ }
+
+ {
+ final DimensionImmutable surfaceSizeInPixel = new Dimension(1280, 800);
+ final float[] screenSizeInMeters = new float[] { 0.1498f, 0.0936f };
+ final float interpupillaryDistanceInMeters = 0.0635f;
+ final float pupilCenterFromScreenTopInMeters = screenSizeInMeters[1] / 2f;
+ final float[] horizPupilCenterFromLeft = Config.getHorizPupilCenterFromLeft(screenSizeInMeters[0], interpupillaryDistanceInMeters);
+ final float vertPupilCenterFromTop = Config.getVertPupilCenterFromTop(screenSizeInMeters[1], pupilCenterFromScreenTopInMeters);
+ final float fovy = 45f;
+ final float aspect = ( surfaceSizeInPixel.getWidth() / 2.0f ) / surfaceSizeInPixel.getHeight();
+ final FovHVHalves defaultSBSEyeFovLeft = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[0]);
+ final FovHVHalves defaultSBSEyeFovRight = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[1]);
+
+ config02StereoSBS01 = new Config(
+ "Def02StereoSBS01",
+ ShutterType.RollingTopToBottom,
+ surfaceSizeInPixel, // resolution
+ screenSizeInMeters, // screenSize [m]
+ new Dimension(1280/2, 800), // eye textureSize
+ 0.0936f/2f, // pupilCenterFromScreenTop [m]
+ interpupillaryDistanceInMeters, // IPD [m]
+ new int[] { 0, 1 }, // eye order
+ new EyeParameter[] {
+ new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_STEREO, defaultSBSEyeFovLeft,
+ 0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */),
+ new EyeParameter(1, DEFAULT_EYE_POSITION_OFFSET_STEREO, defaultSBSEyeFovRight,
+ -0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */) },
+ null, // distortion mesh producer
+ 0, // supported distortion bits
+ 0, // recommended distortion bits
+ 0 // minimum distortion bits
+ );
+ }
+
+ {
+ DistortionMesh.Producer lenseDistMeshProduce = null;
+ try {
+ lenseDistMeshProduce =
+ (DistortionMesh.Producer)
+ ReflectionUtil.createInstance("jogamp.opengl.oculusvr.stereo.lense.DistortionMeshProducer", GenericStereoDevice.class.getClassLoader());
+ } catch (final Throwable t) {
+ if(StereoDevice.DEBUG) { System.err.println("Caught: "+t.getMessage()); t.printStackTrace(); }
+ }
+
+ final DimensionImmutable surfaceSizeInPixel = new Dimension(1280, 800);
+ final float[] screenSizeInMeters = new float[] { 0.1498f, 0.0936f };
+ final DimensionImmutable eyeTextureSize = new Dimension(1122, 1553);
+ final float interpupillaryDistanceInMeters = 0.0635f;
+ final float pupilCenterFromScreenTopInMeters = screenSizeInMeters[1] / 2f;
+ final float[] horizPupilCenterFromLeft = Config.getHorizPupilCenterFromLeft(screenSizeInMeters[0], interpupillaryDistanceInMeters);
+ final float vertPupilCenterFromTop = Config.getVertPupilCenterFromTop(screenSizeInMeters[1], pupilCenterFromScreenTopInMeters);
+ final float fovy = 129f;
+ final float aspect = (float)eyeTextureSize.getWidth() / (float)eyeTextureSize.getHeight(); // avoid integer division
+ final FovHVHalves defaultSBSEyeFovLenseLeft = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[0]);
+ final FovHVHalves defaultSBSEyeFovLenseRight = FovHVHalves.byFovyRadianAndAspect(fovy * d2r, vertPupilCenterFromTop, aspect, horizPupilCenterFromLeft[1]);
+
+ config03StereoSBSLense01 = null == lenseDistMeshProduce ? null :
+ new Config(
+ "Def03StereoSBSLense01",
+ ShutterType.RollingTopToBottom,
+ surfaceSizeInPixel, // resolution
+ screenSizeInMeters, // screenSize [m]
+ eyeTextureSize, // eye textureSize
+ pupilCenterFromScreenTopInMeters, // pupilCenterFromScreenTop [m]
+ interpupillaryDistanceInMeters, // IPD [m]
+ new int[] { 0, 1 }, // eye order
+ new EyeParameter[] {
+ new EyeParameter(0, DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES, defaultSBSEyeFovLenseLeft,
+ 0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */),
+ new EyeParameter(1, DEFAULT_EYE_POSITION_OFFSET_STEREO_LENSES, defaultSBSEyeFovLenseRight,
+ -0.032f /* distNoseToPupil */, 0f /* verticalDelta */, 0.010f /* eyeReliefInMeters */) },
+ lenseDistMeshProduce,
+ // supported distortion bits
+ StereoDeviceRenderer.DISTORTION_BARREL | StereoDeviceRenderer.DISTORTION_CHROMATIC | StereoDeviceRenderer.DISTORTION_VIGNETTE,
+ // recommended distortion bits
+ StereoDeviceRenderer.DISTORTION_BARREL | StereoDeviceRenderer.DISTORTION_CHROMATIC | StereoDeviceRenderer.DISTORTION_VIGNETTE,
+ // minimum distortion bits
+ StereoDeviceRenderer.DISTORTION_BARREL
+ );
+ }
+ configs = new Config[] { config01Mono01, config02StereoSBS01, config03StereoSBSLense01 };
+ }
+
+ public final int deviceIndex;
+ public final Config config;
+
+ public final Point surfacePos;
+ private final FovHVHalves[] defaultEyeFov;
+
+ private boolean sensorsStarted = false;
+
+ public GenericStereoDevice(final int deviceIndex, final StereoDevice.Config customConfig) {
+ this.deviceIndex = deviceIndex;
+
+ if( customConfig instanceof GenericStereoDevice.Config) {
+ this.config = (GenericStereoDevice.Config) customConfig;
+ } else {
+ final int cfgIdx = Math.min(deviceIndex % 10, configs.length-1);
+ this.config = null != configs[cfgIdx] ? configs[cfgIdx] : config02StereoSBS01;
+ }
+ config.init();
+
+ this.surfacePos = new Point(0, 0);
+
+ defaultEyeFov = new FovHVHalves[config.defaultEyeParam.length];
+ for(int i=0; i<defaultEyeFov.length; i++) {
+ defaultEyeFov[i] = config.defaultEyeParam[i].fovhv;
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "GenericStereoDevice["+config+", surfacePos "+surfacePos+"]";
+ }
+
+ public void setSurfacePosition(final int x, final int y) {
+ surfacePos.set(x, y);
+ }
+
+ @Override
+ public final void dispose() {
+ // NOP
+ }
+
+ @Override
+ public final PointImmutable getPosition() {
+ return surfacePos;
+ }
+
+ @Override
+ public final DimensionImmutable getSurfaceSize() {
+ return config.surfaceSizeInPixels;
+ }
+
+ @Override
+ public float[] getDefaultEyePositionOffset() {
+ return config.defaultEyeParam[0].positionOffset;
+ }
+
+ @Override
+ public final FovHVHalves[] getDefaultFOV() {
+ return defaultEyeFov;
+ }
+
+ @Override
+ public final boolean startSensors(final boolean start) {
+ if( start && !sensorsStarted ) {
+ if( startSensorsImpl(true) ) {
+ sensorsStarted = true;
+ return true;
+ } else {
+ sensorsStarted = false;
+ return false;
+ }
+ } else if( sensorsStarted ) {
+ if( startSensorsImpl(false) ) {
+ sensorsStarted = false;
+ return true;
+ } else {
+ sensorsStarted = true;
+ return false;
+ }
+ } else {
+ // No state change -> Success
+ return true;
+ }
+ }
+ private boolean startSensorsImpl(final boolean start) { return start; }
+
+ @Override
+ public boolean getSensorsStarted() { return sensorsStarted; }
+
+ @Override
+ public int[] getEyeRenderOrder() { return config.eyeRenderOrder; }
+
+ @Override
+ public int getSupportedDistortionBits() {
+ return config.supportedDistortionBits;
+ };
+
+ @Override
+ public int getRecommendedDistortionBits() {
+ return config.recommendedDistortionBits;
+ }
+
+ @Override
+ public int getMinimumDistortionBits() {
+ return config.minimumDistortionBits;
+ }
+
+ @Override
+ public final StereoDeviceRenderer createRenderer(final int distortionBits,
+ final int textureCount, final float[] eyePositionOffset,
+ final FovHVHalves[] eyeFov, final float pixelsPerDisplayPixel, final int textureUnit) {
+ final EyeParameter[] eyeParam = new EyeParameter[eyeFov.length];
+ for(int i=0; i<eyeParam.length; i++) {
+ final EyeParameter defaultEyeParam = config.defaultEyeParam[i];
+ eyeParam[i] = new EyeParameter(i, eyePositionOffset, eyeFov[i],
+ defaultEyeParam.distNoseToPupilX, defaultEyeParam.distMiddleToPupilY, defaultEyeParam.eyeReliefZ);
+ }
+
+ final RectangleImmutable[] eyeViewports = new RectangleImmutable[eyeParam.length];
+ final DimensionImmutable eyeTextureSize = config.eyeTextureSize;
+ final DimensionImmutable totalTextureSize;
+ if( 1 < eyeParam.length ) {
+ // Stereo SBS
+ totalTextureSize = new Dimension(eyeTextureSize.getWidth()*2, eyeTextureSize.getHeight());
+ if( 1 == textureCount ) { // validated in ctor below!
+ eyeViewports[0] = new Rectangle(0, 0,
+ totalTextureSize.getWidth() / 2, totalTextureSize.getHeight());
+
+ eyeViewports[1] = new Rectangle((totalTextureSize.getWidth() + 1) / 2, 0,
+ totalTextureSize.getWidth() / 2, totalTextureSize.getHeight());
+ } else {
+ eyeViewports[0] = new Rectangle(0, 0, eyeTextureSize.getWidth(), eyeTextureSize.getHeight());
+ eyeViewports[1] = eyeViewports[0];
+ }
+ } else {
+ // Mono
+ totalTextureSize = eyeTextureSize;
+ eyeViewports[0] = new Rectangle(0, 0, totalTextureSize.getWidth(), totalTextureSize.getHeight());
+ }
+ return new GenericStereoDeviceRenderer(this, distortionBits, textureCount, eyePositionOffset, eyeParam, pixelsPerDisplayPixel, textureUnit,
+ eyeTextureSize, totalTextureSize, eyeViewports);
+ }
+}
\ No newline at end of file
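As a worked example of the pupil-center helpers above, using the Def02StereoSBS01 numbers (0.1498 x 0.0936 m screen, 0.0635 m IPD, pupils vertically centered):

    final float[] horiz = GenericStereoDevice.Config.getHorizPupilCenterFromLeft(0.1498f, 0.0635f);
    // horiz[0] ~= 0.576 (left eye), horiz[1] ~= 0.424 (right eye)
    final float vert = GenericStereoDevice.Config.getVertPupilCenterFromTop(0.0936f, 0.0936f / 2f);
    // vert == 0.5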
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceFactory.java b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceFactory.java
new file mode 100644
index 000000000..a59e8d833
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceFactory.java
@@ -0,0 +1,43 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceFactory;
+
+public class GenericStereoDeviceFactory extends StereoDeviceFactory {
+
+ public static boolean isAvailable() {
+ return true;
+ }
+
+ @Override
+ public final StereoDevice createDevice(final int deviceIndex, final StereoDevice.Config config, final boolean verbose) {
+ return new GenericStereoDevice(deviceIndex, config);
+ }
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceRenderer.java b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceRenderer.java
new file mode 100644
index 000000000..d957bd4e7
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/GenericStereoDeviceRenderer.java
@@ -0,0 +1,605 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import java.nio.FloatBuffer;
+import java.nio.ShortBuffer;
+
+import javax.media.nativewindow.util.Dimension;
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.RectangleImmutable;
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES2;
+import javax.media.opengl.GLArrayData;
+import javax.media.opengl.GLException;
+import javax.media.opengl.GLUniformData;
+
+import jogamp.common.os.PlatformPropsImpl;
+
+import com.jogamp.common.nio.Buffers;
+import com.jogamp.common.os.Platform;
+import com.jogamp.opengl.JoglVersion;
+import com.jogamp.opengl.util.GLArrayDataServer;
+import com.jogamp.opengl.util.glsl.ShaderCode;
+import com.jogamp.opengl.util.glsl.ShaderProgram;
+import com.jogamp.opengl.util.stereo.EyeParameter;
+import com.jogamp.opengl.util.stereo.EyePose;
+import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
+import com.jogamp.opengl.util.stereo.StereoUtil;
+
+/**
+ * Generic Stereo Device Distortion and OpenGL Renderer Utility
+ */
+public class GenericStereoDeviceRenderer implements StereoDeviceRenderer {
+ private static final String shaderPrefix01 = "dist01";
+ private static final String shaderTimewarpSuffix = "_timewarp";
+ private static final String shaderChromaSuffix = "_chroma";
+ private static final String shaderPlainSuffix = "_plain";
+
+ public static class GenericEye implements StereoDeviceRenderer.Eye {
+ private final int eyeName;
+ private final int distortionBits;
+ private final int vertexCount;
+ private final int indexCount;
+ private final RectangleImmutable viewport;
+
+ private final GLUniformData eyeToSourceUVScale;
+ private final GLUniformData eyeToSourceUVOffset;
+ private final GLUniformData eyeRotationStart;
+ private final GLUniformData eyeRotationEnd;
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ private final GLArrayDataServer iVBO;
+ private final GLArrayData vboPos, vboParams, vboTexCoordsR, vboTexCoordsG, vboTexCoordsB;
+ private final GLArrayDataServer indices;
+
+ private final EyeParameter eyeParameter;
+
+ private final EyePose eyePose;
+
+ @Override
+ public final RectangleImmutable getViewport() { return viewport; }
+
+ @Override
+ public final EyeParameter getEyeParameter() { return eyeParameter; }
+
+ @Override
+ public final EyePose getLastEyePose() { return eyePose; }
+
+ private GenericEye(final GenericStereoDevice device, final int distortionBits,
+ final float[] eyePositionOffset, final EyeParameter eyeParam,
+ final DimensionImmutable textureSize, final RectangleImmutable eyeViewport) {
+ this.eyeName = eyeParam.number;
+ this.distortionBits = distortionBits;
+ this.viewport = eyeViewport;
+
+ final boolean usePP = null != device.config.distortionMeshProducer && 0 != distortionBits;
+
+ final boolean usesTimewarp = usePP && StereoUtil.usesTimewarpDistortion(distortionBits);
+ final FloatBuffer fstash = Buffers.newDirectFloatBuffer( 2 + 2 + ( usesTimewarp ? 16 + 16 : 0 ) ) ;
+
+ if( usePP ) {
+ eyeToSourceUVScale = new GLUniformData("svr_EyeToSourceUVScale", 2, Buffers.slice2Float(fstash, 0, 2));
+ eyeToSourceUVOffset = new GLUniformData("svr_EyeToSourceUVOffset", 2, Buffers.slice2Float(fstash, 2, 2));
+ } else {
+ eyeToSourceUVScale = null;
+ eyeToSourceUVOffset = null;
+ }
+
+ if( usesTimewarp ) {
+ eyeRotationStart = new GLUniformData("svr_EyeRotationStart", 4, 4, Buffers.slice2Float(fstash, 4, 16));
+ eyeRotationEnd = new GLUniformData("svr_EyeRotationEnd", 4, 4, Buffers.slice2Float(fstash, 20, 16));
+ } else {
+ eyeRotationStart = null;
+ eyeRotationEnd = null;
+ }
+
+ this.eyeParameter = eyeParam;
+
+ this.eyePose = new EyePose(eyeName);
+
+ updateEyePose(device); // 1st init
+
+ // Setup: eyeToSourceUVScale, eyeToSourceUVOffset
+ if( usePP ) {
+ final ScaleAndOffset2D textureScaleAndOffset = new ScaleAndOffset2D(eyeParam.fovhv, textureSize, eyeViewport);
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX."+eyeName+": eyeParam "+eyeParam);
+ System.err.println("XXX."+eyeName+": uvScaleOffset "+textureScaleAndOffset);
+ System.err.println("XXX."+eyeName+": textureSize "+textureSize);
+ System.err.println("XXX."+eyeName+": viewport "+eyeViewport);
+ }
+ final FloatBuffer eyeToSourceUVScaleFB = eyeToSourceUVScale.floatBufferValue();
+ eyeToSourceUVScaleFB.put(0, textureScaleAndOffset.scale[0]);
+ eyeToSourceUVScaleFB.put(1, textureScaleAndOffset.scale[1]);
+ final FloatBuffer eyeToSourceUVOffsetFB = eyeToSourceUVOffset.floatBufferValue();
+ eyeToSourceUVOffsetFB.put(0, textureScaleAndOffset.offset[0]);
+ eyeToSourceUVOffsetFB.put(1, textureScaleAndOffset.offset[1]);
+ } else {
+ vertexCount = 0;
+ indexCount = 0;
+ iVBO = null;
+ vboPos = null;
+ vboParams = null;
+ vboTexCoordsR = null;
+ vboTexCoordsG = null;
+ vboTexCoordsB = null;
+ indices = null;
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX."+eyeName+": "+this);
+ }
+ return;
+ }
+ final DistortionMesh meshData = device.config.distortionMeshProducer.create(eyeParam, distortionBits);
+ if( null == meshData ) {
+ throw new GLException("Failed to create meshData for eye "+eyeParam+", and "+StereoUtil.distortionBitsToString(distortionBits));
+ }
+
+ vertexCount = meshData.vertexCount;
+ indexCount = meshData.indexCount;
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ final boolean useChromatic = StereoUtil.usesChromaticDistortion(distortionBits);
+ final boolean useVignette = StereoUtil.usesVignetteDistortion(distortionBits);
+
+ final int compsPerElement = 2+2+2+( useChromatic ? 2+2 /* texCoordG + texCoordB */: 0 );
+ iVBO = GLArrayDataServer.createGLSLInterleaved(compsPerElement, GL.GL_FLOAT, false, vertexCount, GL.GL_STATIC_DRAW);
+ vboPos = iVBO.addGLSLSubArray("svr_Position", 2, GL.GL_ARRAY_BUFFER);
+ vboParams = iVBO.addGLSLSubArray("svr_Params", 2, GL.GL_ARRAY_BUFFER);
+ vboTexCoordsR = iVBO.addGLSLSubArray("svr_TexCoordR", 2, GL.GL_ARRAY_BUFFER);
+ if( useChromatic ) {
+ vboTexCoordsG = iVBO.addGLSLSubArray("svr_TexCoordG", 2, GL.GL_ARRAY_BUFFER);
+ vboTexCoordsB = iVBO.addGLSLSubArray("svr_TexCoordB", 2, GL.GL_ARRAY_BUFFER);
+ } else {
+ vboTexCoordsG = null;
+ vboTexCoordsB = null;
+ }
+ indices = GLArrayDataServer.createData(1, GL.GL_SHORT, indexCount, GL.GL_STATIC_DRAW, GL.GL_ELEMENT_ARRAY_BUFFER);
+
+ /** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
+ final FloatBuffer iVBOFB = (FloatBuffer)iVBO.getBuffer();
+
+ for ( int vertNum = 0; vertNum < vertexCount; vertNum++ ) {
+ final DistortionMesh.DistortionVertex v = meshData.vertices[vertNum];
+ int dataIdx = 0;
+
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": START VERTEX "+vertNum+" / "+vertexCount);
+ }
+ // pos
+ if( v.pos_size >= 2 ) {
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": pos ["+v.data[dataIdx]+", "+v.data[dataIdx+1]+"]");
+ }
+ iVBOFB.put(v.data[dataIdx]);
+ iVBOFB.put(v.data[dataIdx+1]);
+ } else {
+ iVBOFB.put(0f);
+ iVBOFB.put(0f);
+ }
+ dataIdx += v.pos_size;
+
+ // params
+ if( v.vignetteFactor_size >= 1 && useVignette ) {
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": vignette "+v.data[dataIdx]);
+ }
+ iVBOFB.put(v.data[dataIdx]);
+ } else {
+ iVBOFB.put(1.0f);
+ }
+ dataIdx += v.vignetteFactor_size;
+
+ if( v.timewarpFactor_size >= 1 ) {
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": timewarp "+v.data[dataIdx]);
+ }
+ iVBOFB.put(v.data[dataIdx]);
+ } else {
+ iVBOFB.put(1.0f);
+ }
+ dataIdx += v.timewarpFactor_size;
+
+ // texCoordR
+ if( v.texR_size >= 2 ) {
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": texR ["+v.data[dataIdx]+", "+v.data[dataIdx+1]+"]");
+ }
+ iVBOFB.put(v.data[dataIdx]);
+ iVBOFB.put(v.data[dataIdx+1]);
+ } else {
+ iVBOFB.put(1f);
+ iVBOFB.put(1f);
+ }
+ dataIdx += v.texR_size;
+
+ if( useChromatic ) {
+ // texCoordG
+ if( v.texG_size >= 2 ) {
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": texG ["+v.data[dataIdx]+", "+v.data[dataIdx+1]+"]");
+ }
+ iVBOFB.put(v.data[dataIdx]);
+ iVBOFB.put(v.data[dataIdx+1]);
+ } else {
+ iVBOFB.put(1f);
+ iVBOFB.put(1f);
+ }
+ dataIdx += v.texG_size;
+
+ // texCoordB
+ if( v.texB_size >= 2 ) {
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": texB ["+v.data[dataIdx]+", "+v.data[dataIdx+1]+"]");
+ }
+ iVBOFB.put(v.data[dataIdx]);
+ iVBOFB.put(v.data[dataIdx+1]);
+ } else {
+ iVBOFB.put(1f);
+ iVBOFB.put(1f);
+ }
+ dataIdx += v.texB_size;
+ } else {
+ dataIdx += v.texG_size;
+ dataIdx += v.texB_size;
+ }
+ }
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": iVBO "+iVBO);
+ }
+ {
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": idx "+indices+", count "+indexCount);
+ for(int i=0; i< indexCount; i++) {
+ if( 0 == i % 16 ) {
+ System.err.printf("%n%5d: ", i);
+ }
+ System.err.printf("%5d, ", (int)meshData.indices[i]);
+ }
+ System.err.println();
+ }
+ final ShortBuffer out = (ShortBuffer) indices.getBuffer();
+ out.put(meshData.indices, 0, meshData.indexCount);
+ }
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX."+eyeName+": "+this);
+ }
+ }
+
+ private void linkData(final GL2ES2 gl, final ShaderProgram sp) {
+ if( null == iVBO ) return;
+
+ if( 0 > vboPos.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+vboPos);
+ }
+ if( 0 > vboParams.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+vboParams);
+ }
+ if( 0 > vboTexCoordsR.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+vboTexCoordsR);
+ }
+ if( StereoUtil.usesChromaticDistortion(distortionBits) ) {
+ if( 0 > vboTexCoordsG.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+vboTexCoordsG);
+ }
+ if( 0 > vboTexCoordsB.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+vboTexCoordsB);
+ }
+ }
+ if( 0 > eyeToSourceUVScale.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+eyeToSourceUVScale);
+ }
+ if( 0 > eyeToSourceUVOffset.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+eyeToSourceUVOffset);
+ }
+ if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+ if( 0 > eyeRotationStart.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+eyeRotationStart);
+ }
+ if( 0 > eyeRotationEnd.setLocation(gl, sp.program()) ) {
+ throw new GLException("Couldn't locate "+eyeRotationEnd);
+ }
+ }
+ iVBO.seal(gl, true);
+ iVBO.enableBuffer(gl, false);
+ indices.seal(gl, true);
+ indices.enableBuffer(gl, false);
+ }
+
+ private void dispose(final GL2ES2 gl) {
+ if( null == iVBO ) return;
+ iVBO.destroy(gl);
+ indices.destroy(gl);
+ }
+ private void enableVBO(final GL2ES2 gl, final boolean enable) {
+ if( null == iVBO ) return;
+ iVBO.enableBuffer(gl, enable);
+ indices.bindBuffer(gl, enable); // keeps VBO binding if enable:=true
+ }
+
+ private void updateUniform(final GL2ES2 gl, final ShaderProgram sp) {
+ if( null == iVBO ) return;
+ gl.glUniform(eyeToSourceUVScale);
+ gl.glUniform(eyeToSourceUVOffset);
+ if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
+ gl.glUniform(eyeRotationStart);
+ gl.glUniform(eyeRotationEnd);
+ }
+ }
+
+ /**
+             * Updates the {@link #eyePose} for the given device.
+             * The generic device provides no head-tracking sensor data,
+             * hence the {@link #eyePose} is returned unmodified.
+             * @param hmdCtx the {@link GenericStereoDevice} this eye belongs to
+ */
+ private EyePose updateEyePose(final GenericStereoDevice hmdCtx) {
+ return eyePose;
+ }
+
+ @Override
+ public String toString() {
+ final String ppTxt = null == iVBO ? ", no post-processing" :
+ ", uvScale["+eyeToSourceUVScale.floatBufferValue().get(0)+", "+eyeToSourceUVScale.floatBufferValue().get(1)+
+ "], uvOffset["+eyeToSourceUVOffset.floatBufferValue().get(0)+", "+eyeToSourceUVOffset.floatBufferValue().get(1)+"]";
+
+ return "Eye["+eyeName+", viewport "+viewport+
+ ", "+eyeParameter+
+ ", vertices "+vertexCount+", indices "+indexCount+
+ ppTxt+
+                   ", "+eyePose+"]";
+ }
+ }
+
+ private final GenericStereoDevice device;
+ private final GenericEye[] eyes;
+ private final int distortionBits;
+ private final int textureCount;
+ private final DimensionImmutable singleTextureSize;
+ private final DimensionImmutable totalTextureSize;
+ /** if texUnit0 is null: no post-processing */
+ private final GLUniformData texUnit0;
+
+ private ShaderProgram sp;
+ private long frameStart = 0;
+
+ @Override
+ public String toString() {
+ return "GenericStereo[distortion["+StereoUtil.distortionBitsToString(distortionBits)+
+ "], singleSize "+singleTextureSize+
+ ", sbsSize "+totalTextureSize+
+ ", texCount "+textureCount+", texUnit "+(null != texUnit0 ? texUnit0.intValue() : "n/a")+
+ ", "+PlatformPropsImpl.NEWLINE+" "+(0 < eyes.length ? eyes[0] : "none")+
+ ", "+PlatformPropsImpl.NEWLINE+" "+(1 < eyes.length ? eyes[1] : "none")+"]";
+ }
+
+
+ private static final DimensionImmutable zeroSize = new Dimension(0, 0);
+
+ /* pp */ GenericStereoDeviceRenderer(final GenericStereoDevice context, final int distortionBits,
+ final int textureCount, final float[] eyePositionOffset,
+ final EyeParameter[] eyeParam, final float pixelsPerDisplayPixel, final int textureUnit,
+ final DimensionImmutable singleTextureSize, final DimensionImmutable totalTextureSize,
+ final RectangleImmutable[] eyeViewports) {
+ this.device = context;
+ this.eyes = new GenericEye[eyeParam.length];
+ this.distortionBits = ( distortionBits | context.getMinimumDistortionBits() ) & context.getSupportedDistortionBits();
+ final boolean usePP = null != device.config.distortionMeshProducer && 0 != this.distortionBits;
+ final DimensionImmutable textureSize;
+
+ if( usePP ) {
+ if( 1 > textureCount || 2 < textureCount ) {
+ this.textureCount = 2;
+ } else {
+ this.textureCount = textureCount;
+ }
+ this.singleTextureSize = singleTextureSize;
+ this.totalTextureSize = totalTextureSize;
+ textureSize = 1 == textureCount ? totalTextureSize : singleTextureSize;
+ texUnit0 = new GLUniformData("svr_Texture0", textureUnit);
+ } else {
+ this.textureCount = 0;
+ this.singleTextureSize = zeroSize;
+ this.totalTextureSize = zeroSize;
+ textureSize = zeroSize;
+ texUnit0 = null;
+ }
+ for(int i=0; i<eyeParam.length; i++) {
+ eyes[i] = new GenericEye(context, this.distortionBits, eyePositionOffset, eyeParam[i], textureSize, eyeViewports[i]);
+ }
+ sp = null;
+ }
+
+ @Override
+ public StereoDevice getDevice() { return device; }
+
+ @Override
+ public final int getDistortionBits() { return distortionBits; }
+
+ @Override
+ public final boolean usesSideBySideStereo() { return true; }
+
+ @Override
+ public final DimensionImmutable getSingleSurfaceSize() { return singleTextureSize; }
+
+ @Override
+ public final DimensionImmutable getTotalSurfaceSize() { return totalTextureSize; }
+
+ @Override
+ public final int getTextureCount() { return textureCount; }
+
+ @Override
+ public final int getTextureUnit() { return ppAvailable() ? texUnit0.intValue() : 0; }
+
+ @Override
+ public final boolean ppAvailable() { return null != texUnit0; }
+
+ @Override
+ public final void init(final GL gl) {
+ if( StereoDevice.DEBUG ) {
+ System.err.println(JoglVersion.getGLInfo(gl, null).toString());
+ }
+ if( null != sp ) {
+ throw new IllegalStateException("Already initialized");
+ }
+ if( !ppAvailable() ) {
+ return;
+ }
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+
+ final String vertexShaderBasename;
+ final String fragmentShaderBasename;
+ {
+ final boolean usesTimewarp = StereoUtil.usesTimewarpDistortion(distortionBits);
+ final boolean usesChromatic = StereoUtil.usesChromaticDistortion(distortionBits);
+
+ final StringBuilder sb = new StringBuilder();
+ sb.append(shaderPrefix01);
+ if( !usesChromatic && !usesTimewarp ) {
+ sb.append(shaderPlainSuffix);
+ } else if( usesChromatic && !usesTimewarp ) {
+ sb.append(shaderChromaSuffix);
+ } else if( usesTimewarp ) {
+ sb.append(shaderTimewarpSuffix);
+ if( usesChromatic ) {
+ sb.append(shaderChromaSuffix);
+ }
+ }
+ vertexShaderBasename = sb.toString();
+ sb.setLength(0);
+ sb.append(shaderPrefix01);
+ if( usesChromatic ) {
+ sb.append(shaderChromaSuffix);
+ } else {
+ sb.append(shaderPlainSuffix);
+ }
+ fragmentShaderBasename = sb.toString();
+ }
+ final ShaderCode vp0 = ShaderCode.create(gl2es2, GL2ES2.GL_VERTEX_SHADER, GenericStereoDeviceRenderer.class, "shader",
+ "shader/bin", vertexShaderBasename, true);
+ final ShaderCode fp0 = ShaderCode.create(gl2es2, GL2ES2.GL_FRAGMENT_SHADER, GenericStereoDeviceRenderer.class, "shader",
+ "shader/bin", fragmentShaderBasename, true);
+ vp0.defaultShaderCustomization(gl2es2, true, true);
+ fp0.defaultShaderCustomization(gl2es2, true, true);
+
+ sp = new ShaderProgram();
+ sp.add(gl2es2, vp0, System.err);
+ sp.add(gl2es2, fp0, System.err);
+ if(!sp.link(gl2es2, System.err)) {
+ throw new GLException("could not link program: "+sp);
+ }
+ sp.useProgram(gl2es2, true);
+ if( 0 > texUnit0.setLocation(gl2es2, sp.program()) ) {
+ throw new GLException("Couldn't locate "+texUnit0);
+ }
+ for(int i=0; i<eyes.length; i++) {
+ eyes[i].linkData(gl2es2, sp);
+ }
+ sp.useProgram(gl2es2, false);
+ }
+
+ @Override
+ public final void dispose(final GL gl) {
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+ if( null != sp ) {
+ sp.useProgram(gl2es2, false);
+ }
+ for(int i=0; i<eyes.length; i++) {
+ eyes[i].dispose(gl2es2);
+ }
+ if( null != sp ) {
+ sp.destroy(gl2es2);
+ }
+ }
+
+ @Override
+ public final Eye getEye(final int eyeNum) {
+ return eyes[eyeNum];
+ }
+
+ @Override
+ public final EyePose updateEyePose(final int eyeNum) {
+ return eyes[eyeNum].updateEyePose(device);
+ }
+
+ @Override
+ public final void beginFrame(final GL gl) {
+ frameStart = Platform.currentTimeMillis();
+ }
+
+ @Override
+ public final void endFrame(final GL gl) {
+ if( 0 == frameStart ) {
+ throw new IllegalStateException("beginFrame not called");
+ }
+ frameStart = 0;
+ }
+
+ @Override
+ public final void ppBegin(final GL gl) {
+ if( null == sp ) {
+ throw new IllegalStateException("Not initialized");
+ }
+ if( 0 == frameStart ) {
+ throw new IllegalStateException("beginFrame not called");
+ }
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+
+ gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+ gl.glClear(GL.GL_COLOR_BUFFER_BIT);
+ gl.glActiveTexture(GL.GL_TEXTURE0 + getTextureUnit());
+
+ gl2es2.glDisable(GL.GL_CULL_FACE);
+ gl2es2.glDisable(GL.GL_DEPTH_TEST);
+ gl2es2.glDisable(GL.GL_BLEND);
+
+ if( !gl2es2.isGLcore() ) {
+ gl2es2.glEnable(GL.GL_TEXTURE_2D);
+ }
+
+ sp.useProgram(gl2es2, true);
+
+ gl2es2.glUniform(texUnit0);
+ }
+
+ @Override
+ public final void ppOneEye(final GL gl, final int eyeNum) {
+ final GenericEye eye = eyes[eyeNum];
+ final GL2ES2 gl2es2 = gl.getGL2ES2();
+
+ eye.updateUniform(gl2es2, sp);
+ eye.enableVBO(gl2es2, true);
+ gl2es2.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0);
+ eyes[eyeNum].enableVBO(gl2es2, false);
+ }
+
+ @Override
+ public final void ppEnd(final GL gl) {
+ sp.useProgram(gl.getGL2ES2(), false);
+ }
+}
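A hedged sketch of the per-frame call order this renderer expects when post-processing is available. In the patch, StereoClientRenderer drives these calls; the loop below is only illustrative:

    // Illustrative frame loop, assuming 'renderer' is the StereoDeviceRenderer created above
    // and the eye views have been rendered into the FBO texture(s) bound at getTextureUnit().
    renderer.beginFrame(gl);
    // ... render both eye views into the FBO texture(s) ...
    renderer.ppBegin(gl);                                   // clear, disable depth/cull/blend, bind shader and texture unit
    for(final int eyeNum : renderer.getDevice().getEyeRenderOrder()) {
        renderer.ppOneEye(gl, eyeNum);                      // draw the eye's distortion mesh
    }
    renderer.ppEnd(gl);                                     // unbind shader
    renderer.endFrame(gl);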
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/ScaleAndOffset2D.java b/src/jogl/classes/jogamp/opengl/util/stereo/ScaleAndOffset2D.java
new file mode 100644
index 000000000..ce154e03e
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/ScaleAndOffset2D.java
@@ -0,0 +1,107 @@
+/**
+ * Copyright 2014 JogAmp Community. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY JogAmp Community ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JogAmp Community OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of JogAmp Community.
+ */
+package jogamp.opengl.util.stereo;
+
+import javax.media.nativewindow.util.DimensionImmutable;
+import javax.media.nativewindow.util.RectangleImmutable;
+
+import com.jogamp.opengl.math.FovHVHalves;
+import com.jogamp.opengl.math.VectorUtil;
+
+/**
+ * 2D scale and offset NDC class,
+ * providing conversion from {@link FovHVHalves} in tangent space to NDC space.
+ * <p>
+ * See <a href="https://www.opengl.org/wiki/Compute_eye_space_from_window_space">OpenGL.org: Compute eye space from window space</a>
+ * </p>
+ */
+public final class ScaleAndOffset2D {
+ /** Scale for x- and y-component. */
+ final float[] scale;
+ /** Offset for x- and y-component. */
+ final float[] offset;
+
+ private static final float[] vec2Half = new float[] { 0.5f, 0.5f };
+
+ public String toString() {
+ return "[offset "+offset[0]+" / "+offset[1]+", scale "+scale[0]+" x "+scale[1]+"]";
+ }
+
+ public ScaleAndOffset2D(final float[] scale, final float[] offset) {
+ this.scale = scale;
+ this.offset = offset;
+ }
+
+ /**
+ * Create the <i>Normalized Device Coordinate Space</i> (NDC) [-1,+1] instance
+ * from the given <code>fovHVHalves</code>.
+ */
+ public ScaleAndOffset2D(final FovHVHalves fovHVHalves) {
+ final FovHVHalves tanFovHVHalves = fovHVHalves.toTangents();
+ final float projXScale = 2.0f / ( tanFovHVHalves.left+ tanFovHVHalves.right);
+ final float projYScale = 2.0f / ( tanFovHVHalves.top + tanFovHVHalves.bottom );
+ final float projXOffset = ( tanFovHVHalves.left - tanFovHVHalves.right ) * projXScale * 0.5f;
+ final float projYOffset = ( tanFovHVHalves.top - tanFovHVHalves.bottom ) * projYScale * 0.5f;
+
+ this.scale = new float[] { projXScale, projYScale };
+ this.offset = new float[] { projXOffset, projYOffset };
+ }
+
+ /**
+ * Create the <i>Normalized Device Coordinate Space</i> (NDC) [-1,+1] instance
+ * from the given <code>fovHVHalves</code>, for the subsection of the <code>render-viewport</code> within the <code>rendertarget-size</code>.
+ */
+ public ScaleAndOffset2D(final FovHVHalves fovHVHalves, final DimensionImmutable rendertargetSize, final RectangleImmutable renderViewport) {
+ final ScaleAndOffset2D eyeToSourceNDC = new ScaleAndOffset2D(fovHVHalves);
+ final float[] vec2Tmp1 = new float[2];
+ final float[] vec2Tmp2 = new float[2];
+ final float[] scale = VectorUtil.scaleVec2(vec2Tmp1, eyeToSourceNDC.scale, 0.5f);
+ final float[] offset = VectorUtil.addVec2(vec2Tmp2, VectorUtil.scaleVec2(vec2Tmp2, eyeToSourceNDC.offset, 0.5f), vec2Half);
+
+ final float[] scale2 = new float[] { (float)renderViewport.getWidth() / (float)rendertargetSize.getWidth(),
+ (float)renderViewport.getHeight() / (float)rendertargetSize.getHeight() };
+
+ final float[] offset2 = new float[] { (float)renderViewport.getX() / (float)rendertargetSize.getWidth(),
+ (float)renderViewport.getY() / (float)rendertargetSize.getHeight() };
+
+ VectorUtil.scaleVec2(scale, scale, scale2);
+ VectorUtil.addVec2(offset, VectorUtil.scaleVec2(offset, offset, scale2), offset2);
+
+ this.scale = scale;
+ this.offset = offset;
+ }
+
+ /**
+ * Return the <i>tangent FOV space</i> of this <i>eye to source NDC</i> instance.
+ */
+ public final float[] convertToTanFovSpace(final float[] rendertargetNDC) {
+ final float[] vec2Tmp1 = new float[2];
+ return VectorUtil.divVec2(vec2Tmp1, VectorUtil.subVec2(vec2Tmp1, rendertargetNDC, this.offset), this.scale);
+ }
+
+} \ No newline at end of file
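In formula form, the first constructor above maps the tangent FOV halves l, r, t, b to an NDC scale and offset (this merely restates the code):

    s_x = \frac{2}{l + r}, \qquad s_y = \frac{2}{t + b}
    o_x = \frac{(l - r)\, s_x}{2}, \qquad o_y = \frac{(t - b)\, s_y}{2}

The viewport constructor then halves this scale and offset, shifts by (1/2, 1/2) into [0,1] texture space, and rescales and translates the result into the render-viewport's sub-rectangle of the render target.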
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp
new file mode 100644
index 000000000..4ac404729
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.fp
@@ -0,0 +1,26 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define varying in
+ out vec4 svr_FragColor;
+ #define texture2D texture
+#else
+ #define svr_FragColor gl_FragColor
+#endif
+
+uniform sampler2D svr_Texture0;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+varying vec2 svv_TexCoordG;
+varying vec2 svv_TexCoordB;
+
+void main (void)
+{
+ // 3 samples for fixing chromatic aberrations
+ vec3 color = vec3(texture2D(svr_Texture0, svv_TexCoordR).r,
+ texture2D(svr_Texture0, svv_TexCoordG).g,
+ texture2D(svr_Texture0, svv_TexCoordB).b);
+ svr_FragColor = vec4(svv_Fade * color, 1.0); // include vignetteFade
+}
+
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp
new file mode 100644
index 000000000..d4ab585d5
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_chroma.vp
@@ -0,0 +1,33 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 svr_EyeToSourceUVScale;
+uniform vec2 svr_EyeToSourceUVOffset;
+
+attribute vec2 svr_Position;
+attribute vec2 svr_Params;
+attribute vec2 svr_TexCoordR;
+attribute vec2 svr_TexCoordG;
+attribute vec2 svr_TexCoordB;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+varying vec2 svv_TexCoordG;
+varying vec2 svv_TexCoordB;
+
+void main(void)
+{
+ gl_Position = vec4(svr_Position.xy, 0.5, 1.0);
+ svv_Fade = vec3(svr_Params.r); // vignetteFade
+
+ svv_TexCoordR = svr_TexCoordR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+ svv_TexCoordG = svr_TexCoordG * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordG.y = 1.0-svv_TexCoordG.y;
+ svv_TexCoordB = svr_TexCoordB * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordB.y = 1.0-svv_TexCoordB.y;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp
new file mode 100644
index 000000000..2df890648
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.fp
@@ -0,0 +1,22 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define varying in
+ out vec4 svr_FragColor;
+ #define texture2D texture
+#else
+ #define svr_FragColor gl_FragColor
+#endif
+
+uniform sampler2D svr_Texture0;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+
+void main (void)
+{
+  // single texture sample; the chromatic variant uses three samples instead
+ vec3 color = texture2D(svr_Texture0, svv_TexCoordR).rgb;
+ svr_FragColor = vec4(svv_Fade * color, 1.0); // include vignetteFade
+}
+
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp
new file mode 100644
index 000000000..335d3f0f6
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_plain.vp
@@ -0,0 +1,27 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 svr_EyeToSourceUVScale;
+uniform vec2 svr_EyeToSourceUVOffset;
+
+attribute vec2 svr_Position;
+attribute vec2 svr_Params;
+attribute vec2 svr_TexCoordR;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+
+void main(void)
+{
+ gl_Position = vec4(svr_Position.xy, 0.5, 1.0);
+ svv_Fade = vec3(svr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ svv_TexCoordR = svr_TexCoordR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+}
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp
new file mode 100644
index 000000000..c4461ec3e
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp.vp
@@ -0,0 +1,44 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 svr_EyeToSourceUVScale;
+uniform vec2 svr_EyeToSourceUVOffset;
+uniform mat4 svr_EyeRotationStart;
+uniform mat4 svr_EyeRotationEnd;
+
+attribute vec2 svr_Position;
+attribute vec2 svr_Params;
+attribute vec2 svr_TexCoordR;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+
+void main(void)
+{
+ gl_Position = vec4(svr_Position.xy, 0.0, 1.0);
+ svv_Fade = vec3(svr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD.
+ vec3 TanEyeAngle = vec3 ( svr_TexCoordR, 1.0 );
+
+ // Accurate time warp lerp vs. faster
+ // Apply the two 3x3 timewarp rotations to these vectors.
+ vec3 TransformedStart = (svr_EyeRotationStart * vec4(TanEyeAngle, 0)).xyz;
+ vec3 TransformedEnd = (svr_EyeRotationEnd * vec4(TanEyeAngle, 0)).xyz;
+ // And blend between them.
+ vec3 Transformed = mix ( TransformedStart, TransformedEnd, svr_Params.g /* timewarpLerpFactor */ );
+
+ // Project them back onto the Z=1 plane of the rendered images.
+ float RecipZ = 1.0 / Transformed.z;
+ vec2 Flattened = vec2 ( Transformed.x * RecipZ, Transformed.y * RecipZ );
+
+ // These are now still in TanEyeAngle space.
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ svv_TexCoordR = Flattened * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+}
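The timewarp part of this vertex shader can be summarized as follows, with R_s and R_e the start/end eye rotation matrices and t = svr_Params.g the per-vertex timewarp lerp factor:

    v  = (u_R, 1)^{T}
    v' = (1 - t)\, R_s\, v \; + \; t\, R_e\, v
    uv_R = \left( \frac{v'_x}{v'_z}, \; \frac{v'_y}{v'_z} \right) \cdot S_{uv} + O_{uv}

The chroma variant below repeats the same transform for the R, G and B texture coordinates, and in both cases the y-component is flipped at the end.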
diff --git a/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp
new file mode 100644
index 000000000..c08ed3113
--- /dev/null
+++ b/src/jogl/classes/jogamp/opengl/util/stereo/shader/dist01_timewarp_chroma.vp
@@ -0,0 +1,65 @@
+//Copyright 2014 JogAmp Community. All rights reserved.
+
+#if __VERSION__ >= 130
+ #define attribute in
+ #define varying out
+#endif
+
+uniform vec2 svr_EyeToSourceUVScale;
+uniform vec2 svr_EyeToSourceUVOffset;
+uniform mat4 svr_EyeRotationStart;
+uniform mat4 svr_EyeRotationEnd;
+
+attribute vec2 svr_Position;
+attribute vec2 svr_Params;
+attribute vec2 svr_TexCoordR;
+attribute vec2 svr_TexCoordG;
+attribute vec2 svr_TexCoordB;
+
+varying vec3 svv_Fade;
+varying vec2 svv_TexCoordR;
+varying vec2 svv_TexCoordG;
+varying vec2 svv_TexCoordB;
+
+void main(void)
+{
+ gl_Position = vec4(svr_Position.xy, 0.0, 1.0);
+ svv_Fade = vec3(svr_Params.r); // vignetteFade
+
+ // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic aberration and distortion).
+ // These are now "real world" vectors in direction (x,y,1) relative to the eye of the HMD.
+ vec3 TanEyeAngleR = vec3 ( svr_TexCoordR, 1.0 );
+ vec3 TanEyeAngleG = vec3 ( svr_TexCoordG, 1.0 );
+ vec3 TanEyeAngleB = vec3 ( svr_TexCoordB, 1.0 );
+
+ // Accurate time warp lerp vs. faster
+ // Apply the two 3x3 timewarp rotations to these vectors.
+ vec3 TransformedRStart = (svr_EyeRotationStart * vec4(TanEyeAngleR, 0)).xyz;
+ vec3 TransformedGStart = (svr_EyeRotationStart * vec4(TanEyeAngleG, 0)).xyz;
+ vec3 TransformedBStart = (svr_EyeRotationStart * vec4(TanEyeAngleB, 0)).xyz;
+ vec3 TransformedREnd = (svr_EyeRotationEnd * vec4(TanEyeAngleR, 0)).xyz;
+ vec3 TransformedGEnd = (svr_EyeRotationEnd * vec4(TanEyeAngleG, 0)).xyz;
+ vec3 TransformedBEnd = (svr_EyeRotationEnd * vec4(TanEyeAngleB, 0)).xyz;
+
+ // And blend between them.
+ vec3 TransformedR = mix ( TransformedRStart, TransformedREnd, svr_Params.g /* timewarpLerpFactor */ );
+ vec3 TransformedG = mix ( TransformedGStart, TransformedGEnd, svr_Params.g /* timewarpLerpFactor */ );
+ vec3 TransformedB = mix ( TransformedBStart, TransformedBEnd, svr_Params.g /* timewarpLerpFactor */ );
+
+ // Project them back onto the Z=1 plane of the rendered images.
+ float RecipZR = 1.0 / TransformedR.z;
+ float RecipZG = 1.0 / TransformedG.z;
+ float RecipZB = 1.0 / TransformedB.z;
+ vec2 FlattenedR = vec2 ( TransformedR.x * RecipZR, TransformedR.y * RecipZR );
+ vec2 FlattenedG = vec2 ( TransformedG.x * RecipZG, TransformedG.y * RecipZG );
+ vec2 FlattenedB = vec2 ( TransformedB.x * RecipZB, TransformedB.y * RecipZB );
+
+ // These are now still in TanEyeAngle space.
+ // Scale them into the correct [0-1],[0-1] UV lookup space (depending on eye)
+ svv_TexCoordR = FlattenedR * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordR.y = 1.0-svv_TexCoordR.y;
+ svv_TexCoordG = FlattenedG * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordG.y = 1.0-svv_TexCoordG.y;
+ svv_TexCoordB = FlattenedB * svr_EyeToSourceUVScale + svr_EyeToSourceUVOffset;
+ svv_TexCoordB.y = 1.0-svv_TexCoordB.y;
+}
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java
index 09a348c46..2832012e4 100644
--- a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDevice.java
@@ -42,19 +42,50 @@ import com.jogamp.oculusvr.ovrSizei;
import com.jogamp.opengl.math.FovHVHalves;
import com.jogamp.opengl.util.stereo.StereoDevice;
import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
+import com.jogamp.opengl.util.stereo.StereoUtil;
public class OVRStereoDevice implements StereoDevice {
+ /** 1.6 up, 5 forward */
+ private static final float[] DEFAULT_EYE_POSITION_OFFSET = { 0.0f, 1.6f, -5.0f };
+
public final OvrHmdContext handle;
public final int deviceIndex;
public final ovrHmdDesc hmdDesc;
+ private final FovHVHalves[] defaultEyeFov;
private boolean sensorsStarted = false;
+ private final int[] eyeRenderOrder;
+ private final int supportedDistortionBits, recommendedDistortionBits, minimumDistortionBits;
public OVRStereoDevice(final OvrHmdContext nativeContext, final int deviceIndex) {
this.handle = nativeContext;
this.deviceIndex = deviceIndex;
this.hmdDesc = ovrHmdDesc.create();
OVR.ovrHmd_GetDesc(handle, hmdDesc);
+ final ovrFovPort[] defaultOVREyeFov = hmdDesc.getDefaultEyeFov(0, new ovrFovPort[hmdDesc.getEyeRenderOrderArrayLength()]);
+ defaultEyeFov = new FovHVHalves[defaultOVREyeFov.length];
+ for(int i=0; i<defaultEyeFov.length; i++) {
+ defaultEyeFov[i] = OVRUtil.getFovHV(defaultOVREyeFov[i]);
+ }
+ eyeRenderOrder = new int[hmdDesc.getEyeRenderOrderArrayLength()];
+ hmdDesc.getEyeRenderOrder(0, eyeRenderOrder);
+ supportedDistortionBits = OVRUtil.ovrDistCaps2DistBits(hmdDesc.getDistortionCaps());
+ recommendedDistortionBits = supportedDistortionBits & ~StereoDeviceRenderer.DISTORTION_TIMEWARP;
+ minimumDistortionBits = StereoDeviceRenderer.DISTORTION_BARREL;
+ }
+
+ @Override
+ public final String toString() {
+ final StringBuilder sb = new StringBuilder();
+ sb.append("OVRStereoDevice[product "+hmdDesc.getProductNameAsString());
+ sb.append(", vendor "+hmdDesc.getManufacturerAsString());
+ sb.append(", device "+hmdDesc.getDisplayDeviceNameAsString());
+ sb.append(", surfaceSize "+getSurfaceSize());
+ sb.append(", surfacePos "+getPosition());
+ sb.append(", distortionBits[supported ["+StereoUtil.distortionBitsToString(getSupportedDistortionBits())+
+ "], recommended ["+StereoUtil.distortionBitsToString(getRecommendedDistortionBits())+
+ "], minimum ["+StereoUtil.distortionBitsToString(getMinimumDistortionBits())+"]]]");
+ return sb.toString();
}
@Override
@@ -73,12 +104,13 @@ public class OVRStereoDevice implements StereoDevice {
}
@Override
+ public float[] getDefaultEyePositionOffset() {
+ return DEFAULT_EYE_POSITION_OFFSET;
+ }
+
+ @Override
public final FovHVHalves[] getDefaultFOV() {
- final ovrFovPort[] defaultEyeFov = hmdDesc.getDefaultEyeFov(0, new ovrFovPort[2]);
- final FovHVHalves[] eyeFov = new FovHVHalves[2];
- eyeFov[0] = OVRUtil.getFovHV(defaultEyeFov[0]);
- eyeFov[1] = OVRUtil.getFovHV(defaultEyeFov[1]);
- return eyeFov;
+ return defaultEyeFov;
}
@Override
@@ -104,7 +136,27 @@ public class OVRStereoDevice implements StereoDevice {
}
}
@Override
- public boolean getSensorsStarted() { return sensorsStarted; }
+ public final boolean getSensorsStarted() { return sensorsStarted; }
+
+ @Override
+ public final int[] getEyeRenderOrder() {
+ return eyeRenderOrder;
+ }
+
+ @Override
+ public final int getSupportedDistortionBits() {
+ return supportedDistortionBits;
+ };
+
+ @Override
+ public final int getRecommendedDistortionBits() {
+ return recommendedDistortionBits;
+ }
+
+ @Override
+ public final int getMinimumDistortionBits() {
+ return minimumDistortionBits;
+ }
@Override
public final StereoDeviceRenderer createRenderer(final int distortionBits,
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java
index 06454e443..ebd3699e1 100644
--- a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceFactory.java
@@ -31,6 +31,7 @@ import com.jogamp.oculusvr.OVR;
import com.jogamp.oculusvr.OVRVersion;
import com.jogamp.oculusvr.OvrHmdContext;
import com.jogamp.opengl.util.stereo.StereoDevice;
+import com.jogamp.opengl.util.stereo.StereoDevice.Config;
import com.jogamp.opengl.util.stereo.StereoDeviceFactory;
public class OVRStereoDeviceFactory extends StereoDeviceFactory {
@@ -40,7 +41,7 @@ public class OVRStereoDeviceFactory extends StereoDeviceFactory {
}
@Override
- public final StereoDevice createDevice(final int deviceIndex, final boolean verbose) {
+ public final StereoDevice createDevice(final int deviceIndex, Config config, final boolean verbose) {
final OvrHmdContext hmdCtx = OVR.ovrHmd_Create(deviceIndex);
final OVRStereoDevice ctx = new OVRStereoDevice(hmdCtx, deviceIndex);
if( verbose ) {
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java
index 012ad183d..39736217c 100644
--- a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRStereoDeviceRenderer.java
@@ -67,7 +67,7 @@ import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
import com.jogamp.opengl.util.stereo.StereoUtil;
/**
- * OculusVR Distortion Data and OpenGL Renderer Utility
+ * OculusVR Stereo Device Distortion and OpenGL Renderer Utility
*/
public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
private static final String shaderPrefix01 = "dist01";
@@ -140,12 +140,34 @@ public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
updateEyePose(hmdCtx); // 1st init
+ // Setup: eyeToSourceUVScale, eyeToSourceUVOffset
+ {
+ final ovrVector2f[] uvScaleOffsetOut = new ovrVector2f[2];
+ uvScaleOffsetOut[0] = ovrVector2f.create(); // FIXME: remove ctor / double check
+ uvScaleOffsetOut[1] = ovrVector2f.create();
+
+ final ovrRecti ovrEyeRenderViewport = OVRUtil.createOVRRecti(eyeViewport);
+ OVR.ovrHmd_GetRenderScaleAndOffset(ovrEyeFov, ovrTextureSize, ovrEyeRenderViewport, uvScaleOffsetOut);
+ if( StereoDevice.DEBUG ) {
+ System.err.println("XXX."+eyeName+": eyeParam "+eyeParameter);
+ System.err.println("XXX."+eyeName+": uvScale "+OVRUtil.toString(uvScaleOffsetOut[0]));
+ System.err.println("XXX."+eyeName+": uvOffset "+OVRUtil.toString(uvScaleOffsetOut[1]));
+ System.err.println("XXX."+eyeName+": textureSize "+OVRUtil.toString(ovrTextureSize));
+ System.err.println("XXX."+eyeName+": viewport "+OVRUtil.toString(ovrEyeRenderViewport));
+ }
+ final FloatBuffer eyeToSourceUVScaleFB = eyeToSourceUVScale.floatBufferValue();
+ eyeToSourceUVScaleFB.put(0, uvScaleOffsetOut[0].getX());
+ eyeToSourceUVScaleFB.put(1, uvScaleOffsetOut[0].getY());
+ final FloatBuffer eyeToSourceUVOffsetFB = eyeToSourceUVOffset.floatBufferValue();
+ eyeToSourceUVOffsetFB.put(0, uvScaleOffsetOut[1].getX());
+ eyeToSourceUVOffsetFB.put(1, uvScaleOffsetOut[1].getY());
+ }
+
final ovrDistortionMesh meshData = ovrDistortionMesh.create();
- final ovrFovPort fov = eyeDesc.getFov();
final int ovrDistortionCaps = distBits2OVRDistCaps(distortionBits);
- if( !OVR.ovrHmd_CreateDistortionMesh(hmdCtx, eyeName, fov, ovrDistortionCaps, meshData) ) {
- throw new OVRException("Failed to create meshData for eye "+eyeName+", "+OVRUtil.toString(fov)+" and "+StereoUtil.distortionBitsToString(distortionBits));
+ if( !OVR.ovrHmd_CreateDistortionMesh(hmdCtx, eyeName, ovrEyeFov, ovrDistortionCaps, meshData) ) {
+ throw new OVRException("Failed to create meshData for eye "+eyeName+", "+OVRUtil.toString(ovrEyeFov)+" and "+StereoUtil.distortionBitsToString(distortionBits));
}
vertexCount = meshData.getVertexCount();
indexCount = meshData.getIndexCount();
@@ -168,29 +190,6 @@ public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
}
indices = GLArrayDataServer.createData(1, GL.GL_SHORT, indexCount, GL.GL_STATIC_DRAW, GL.GL_ELEMENT_ARRAY_BUFFER);
- // Setup: eyeToSourceUVScale, eyeToSourceUVOffset
- {
- final ovrVector2f[] uvScaleOffsetOut = new ovrVector2f[2];
- uvScaleOffsetOut[0] = ovrVector2f.create(); // FIXME: remove ctor / double check
- uvScaleOffsetOut[1] = ovrVector2f.create();
-
- final ovrRecti ovrEyeRenderViewport = OVRUtil.createOVRRecti(eyeViewport);
- OVR.ovrHmd_GetRenderScaleAndOffset(fov, ovrTextureSize, ovrEyeRenderViewport, uvScaleOffsetOut);
- if( StereoDevice.DEBUG ) {
- System.err.println("XXX."+eyeName+": fov "+OVRUtil.toString(fov));
- System.err.println("XXX."+eyeName+": uvScale "+OVRUtil.toString(uvScaleOffsetOut[0]));
- System.err.println("XXX."+eyeName+": uvOffset "+OVRUtil.toString(uvScaleOffsetOut[1]));
- System.err.println("XXX."+eyeName+": textureSize "+OVRUtil.toString(ovrTextureSize));
- System.err.println("XXX."+eyeName+": viewport "+OVRUtil.toString(ovrEyeRenderViewport));
- }
- final FloatBuffer eyeToSourceUVScaleFB = eyeToSourceUVScale.floatBufferValue();
- eyeToSourceUVScaleFB.put(0, uvScaleOffsetOut[0].getX());
- eyeToSourceUVScaleFB.put(1, uvScaleOffsetOut[0].getY());
- final FloatBuffer eyeToSourceUVOffsetFB = eyeToSourceUVOffset.floatBufferValue();
- eyeToSourceUVOffsetFB.put(0, uvScaleOffsetOut[1].getX());
- eyeToSourceUVOffsetFB.put(1, uvScaleOffsetOut[1].getY());
- }
-
/** 2+2+2+2+2: { vec2 position, vec2 color, vec2 texCoordR, vec2 texCoordG, vec2 texCoordB } */
final FloatBuffer iVBOFB = (FloatBuffer)iVBO.getBuffer();
final ovrDistortionVertex[] ovRes = new ovrDistortionVertex[1];
@@ -200,46 +199,76 @@ public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
final ovrDistortionVertex ov = meshData.getPVertexData(vertNum, ovRes)[0];
ovrVector2f v;
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": START VERTEX "+vertNum+" / "+vertexCount);
+ }
// pos
v = ov.getPos();
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": pos "+OVRUtil.toString(v));
+ }
iVBOFB.put(v.getX());
iVBOFB.put(v.getY());
// params
if( useVignette ) {
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": vignette "+ov.getVignetteFactor());
+ }
iVBOFB.put(ov.getVignetteFactor());
} else {
iVBOFB.put(1.0f);
}
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": timewarp "+ov.getTimeWarpFactor());
+ }
iVBOFB.put(ov.getTimeWarpFactor());
// texCoordR
v = ov.getTexR();
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": texR "+OVRUtil.toString(v));
+ }
iVBOFB.put(v.getX());
iVBOFB.put(v.getY());
if( useChromatic ) {
// texCoordG
v = ov.getTexG();
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": texG "+OVRUtil.toString(v));
+ }
iVBOFB.put(v.getX());
iVBOFB.put(v.getY());
// texCoordB
v = ov.getTexB();
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": texB "+OVRUtil.toString(v));
+ }
iVBOFB.put(v.getX());
iVBOFB.put(v.getY());
}
}
- if( StereoDevice.DEBUG ) {
+ if( StereoDevice.DUMP_DATA ) {
System.err.println("XXX."+eyeName+": iVBO "+iVBO);
}
{
final ShortBuffer in = meshData.getPIndexData();
+ if( StereoDevice.DUMP_DATA ) {
+ System.err.println("XXX."+eyeName+": idx "+indices+", count "+indexCount);
+ for(int i=0; i< indexCount; i++) {
+ if( 0 == i % 16 ) {
+ System.err.printf("%n%5d: ", i);
+ }
+ System.err.printf("%5d, ", (int)in.get(i));
+ }
+ System.err.println();
+ }
final ShortBuffer out = (ShortBuffer) indices.getBuffer();
out.put(in);
}
if( StereoDevice.DEBUG ) {
- System.err.println("XXX."+eyeName+": idx "+indices);
System.err.println("XXX."+eyeName+": "+this);
}
OVR.ovrHmd_DestroyDistortionMesh(meshData);
@@ -247,34 +276,34 @@ public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
private void linkData(final GL2ES2 gl, final ShaderProgram sp) {
if( 0 > vboPos.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboPos);
+ throw new GLException("Couldn't locate "+vboPos);
}
if( 0 > vboParams.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboParams);
+ throw new GLException("Couldn't locate "+vboParams);
}
if( 0 > vboTexCoordsR.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboTexCoordsR);
+ throw new GLException("Couldn't locate "+vboTexCoordsR);
}
if( StereoUtil.usesChromaticDistortion(distortionBits) ) {
if( 0 > vboTexCoordsG.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboTexCoordsG);
+ throw new GLException("Couldn't locate "+vboTexCoordsG);
}
if( 0 > vboTexCoordsB.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+vboTexCoordsB);
+ throw new GLException("Couldn't locate "+vboTexCoordsB);
}
}
if( 0 > eyeToSourceUVScale.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+eyeToSourceUVScale);
+ throw new GLException("Couldn't locate "+eyeToSourceUVScale);
}
if( 0 > eyeToSourceUVOffset.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+eyeToSourceUVOffset);
+ throw new GLException("Couldn't locate "+eyeToSourceUVOffset);
}
if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
if( 0 > eyeRotationStart.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+eyeRotationStart);
+ throw new GLException("Couldn't locate "+eyeRotationStart);
}
if( 0 > eyeRotationEnd.setLocation(gl, sp.program()) ) {
- throw new OVRException("Couldn't locate "+eyeRotationEnd);
+ throw new GLException("Couldn't locate "+eyeRotationEnd);
}
}
iVBO.seal(gl, true);
@@ -368,17 +397,17 @@ public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
private static int distBits2OVRDistCaps(final int distortionBits) {
- int bits = 0;
+ int caps = 0;
if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
- bits |= OVR.ovrDistortionCap_TimeWarp;
+ caps |= OVR.ovrDistortionCap_TimeWarp;
}
if( StereoUtil.usesChromaticDistortion(distortionBits) ) {
- bits |= OVR.ovrDistortionCap_Chromatic;
+ caps |= OVR.ovrDistortionCap_Chromatic;
}
if( StereoUtil.usesVignetteDistortion(distortionBits) ) {
- bits |= OVR.ovrDistortionCap_Vignette;
+ caps |= OVR.ovrDistortionCap_Vignette;
}
- return bits;
+ return caps;
}
/* pp */ OVRStereoDeviceRenderer(final OVRStereoDevice context, final int distortionBits,
@@ -390,7 +419,7 @@ public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
}
this.context = context;
this.eyes = new OVREye[2];
- this.distortionBits = distortionBits | StereoDeviceRenderer.DISTORTION_BARREL /* always */;
+ this.distortionBits = ( distortionBits | context.getMinimumDistortionBits() ) & context.getSupportedDistortionBits();
this.textureCount = textureCount;
this.singleTextureSize = singleTextureSize;
this.totalTextureSize = totalTextureSize;
@@ -428,7 +457,7 @@ public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
public final int getTextureUnit() { return texUnit0.intValue(); }
@Override
- public final boolean ppRequired() { return true; }
+ public final boolean ppAvailable() { return 0 != distortionBits; }
@Override
public final void init(final GL gl) {
@@ -555,21 +584,6 @@ public class OVRStereoDeviceRenderer implements StereoDeviceRenderer {
}
@Override
- public final void ppBothEyes(final GL gl) {
- final GL2ES2 gl2es2 = gl.getGL2ES2();
- for(int eyeNum=0; eyeNum<2; eyeNum++) {
- final OVREye eye = eyes[eyeNum];
- if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
- eye.updateTimewarp(context.handle, eye.ovrEyePose, mat4Tmp1, mat4Tmp2);
- }
- eye.updateUniform(gl2es2, sp);
- eye.enableVBO(gl2es2, true);
- gl2es2.glDrawElements(GL.GL_TRIANGLES, eye.indexCount, GL.GL_UNSIGNED_SHORT, 0);
- eyes[eyeNum].enableVBO(gl2es2, false);
- }
- }
-
- @Override
public final void ppOneEye(final GL gl, final int eyeNum) {
final OVREye eye = eyes[eyeNum];
if( StereoUtil.usesTimewarpDistortion(distortionBits) ) {
diff --git a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java
index 48222ea97..44ec728fc 100644
--- a/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java
+++ b/src/oculusvr/classes/jogamp/opengl/oculusvr/OVRUtil.java
@@ -33,6 +33,7 @@ import javax.media.nativewindow.util.Point;
import javax.media.nativewindow.util.PointImmutable;
import javax.media.nativewindow.util.RectangleImmutable;
+import com.jogamp.oculusvr.OVR;
import com.jogamp.oculusvr.ovrEyeRenderDesc;
import com.jogamp.oculusvr.ovrFovPort;
import com.jogamp.oculusvr.ovrQuatf;
@@ -43,6 +44,7 @@ import com.jogamp.oculusvr.ovrVector2i;
import com.jogamp.oculusvr.ovrVector3f;
import com.jogamp.opengl.math.FovHVHalves;
import com.jogamp.opengl.math.Quaternion;
+import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
/**
* OculusVR Data Conversion Helper Functions
@@ -122,7 +124,7 @@ public class OVRUtil {
}
public static ovrFovPort getOVRFovPort(final FovHVHalves fovHVHalves) {
final ovrFovPort tanHalfFov = ovrFovPort.create();
- final FovHVHalves fovHVHalvesTan = fovHVHalves.getInTangents();
+ final FovHVHalves fovHVHalvesTan = fovHVHalves.toTangents();
tanHalfFov.setLeftTan(fovHVHalvesTan.left);
tanHalfFov.setRightTan(fovHVHalvesTan.right);
tanHalfFov.setUpTan(fovHVHalvesTan.top);
@@ -130,6 +132,21 @@ public class OVRUtil {
return tanHalfFov;
}
+ public static int ovrDistCaps2DistBits(final int ovrDistortionCaps) {
+ int bits = StereoDeviceRenderer.DISTORTION_BARREL;
+ if( 0 != ( OVR.ovrDistortionCap_TimeWarp & ovrDistortionCaps ) ) {
+ bits |= StereoDeviceRenderer.DISTORTION_TIMEWARP;
+ }
+ if( 0 != ( OVR.ovrDistortionCap_Chromatic & ovrDistortionCaps ) ) {
+ bits |= StereoDeviceRenderer.DISTORTION_CHROMATIC;
+ }
+ if( 0 != ( OVR.ovrDistortionCap_Vignette & ovrDistortionCaps ) ) {
+ bits |= StereoDeviceRenderer.DISTORTION_VIGNETTE;
+ }
+ return bits;
+ }
+
+
public static String toString(final ovrFovPort fov) {
return "["+fov.getLeftTan()+" l, "+fov.getRightTan()+" r, "+
fov.getUpTan()+" u, "+fov.getDownTan()+" d]";
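The new ovrDistCaps2DistBits helper is the inverse of distBits2OVRDistCaps in OVRStereoDeviceRenderer; an illustrative mapping (not patch code):

    // Illustrative only: an HMD reporting all three optional OVR distortion caps
    // maps to barrel (always implied) plus the timewarp, chromatic and vignette bits.
    final int caps = OVR.ovrDistortionCap_TimeWarp | OVR.ovrDistortionCap_Chromatic | OVR.ovrDistortionCap_Vignette;
    final int bits = OVRUtil.ovrDistCaps2DistBits(caps);
    // bits == DISTORTION_BARREL | DISTORTION_TIMEWARP | DISTORTION_CHROMATIC | DISTORTION_VIGNETTE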
diff --git a/src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java b/src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java
index b33b18358..05685c05a 100644
--- a/src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java
+++ b/src/test/com/jogamp/opengl/test/junit/jogl/stereo/StereoDemo01.java
@@ -30,6 +30,7 @@ package com.jogamp.opengl.test.junit.jogl.stereo;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
+import java.util.Arrays;
import javax.media.nativewindow.util.DimensionImmutable;
import javax.media.nativewindow.util.PointImmutable;
@@ -37,6 +38,8 @@ import javax.media.opengl.GL;
import javax.media.opengl.GLCapabilities;
import javax.media.opengl.GLProfile;
+import jogamp.opengl.util.stereo.GenericStereoDevice;
+
import com.jogamp.common.util.IOUtil;
import com.jogamp.common.util.ReflectionUtil;
import com.jogamp.newt.event.KeyAdapter;
@@ -56,6 +59,7 @@ import com.jogamp.opengl.util.stereo.StereoDeviceRenderer;
import com.jogamp.opengl.util.stereo.StereoDeviceFactory;
import com.jogamp.opengl.util.stereo.StereoClientRenderer;
import com.jogamp.opengl.util.stereo.StereoGLEventListener;
+import com.jogamp.opengl.util.stereo.StereoUtil;
/**
* All distortions, no multisampling, bilinear filtering, manual-swap and using two FBOs (default, good)
@@ -103,8 +107,14 @@ public class StereoDemo01 {
static String useFilmFile = null;
static String useFilmURI = null;
static String stereoRendererListenerName = null;
+ static StereoDeviceFactory.DeviceType deviceType = StereoDeviceFactory.DeviceType.Default;
+ static int deviceIndex = 0;
public static void main(final String args[]) throws InterruptedException, URISyntaxException {
+ boolean useRecommendedDistortionBits = true;
+ int posx = -1;
+ int posy = -1;
+
for(int i=0; i<args.length; i++) {
if(args[i].equals("-time")) {
i++;
@@ -121,18 +131,34 @@ public class StereoDemo01 {
} else if(args[i].equals("-vignette")) {
i++;
useVignette = MiscUtils.atob(args[i], useVignette);
+ useRecommendedDistortionBits = false;
} else if(args[i].equals("-chromatic")) {
i++;
useChromatic = MiscUtils.atob(args[i], useChromatic);
+ useRecommendedDistortionBits = false;
} else if(args[i].equals("-timewarp")) {
i++;
useTimewarp = MiscUtils.atob(args[i], useTimewarp);
+ useRecommendedDistortionBits = false;
} else if(args[i].equals("-vignette")) {
i++;
useVignette = MiscUtils.atob(args[i], useVignette);
+ useRecommendedDistortionBits = false;
} else if(args[i].equals("-mainScreen")) {
i++;
useStereoScreen = !MiscUtils.atob(args[i], useStereoScreen);
+ } else if(args[i].equals("-device")) {
+ i++;
+ deviceType = StereoDeviceFactory.DeviceType.valueOf(args[i]);
+ } else if(args[i].equals("-deviceIndex")) {
+ i++;
+ deviceIndex = MiscUtils.atoi(args[i], deviceIndex);
+ } else if(args[i].equals("-posx")) {
+ i++;
+ posx = MiscUtils.atoi(args[i], posx);
+ } else if(args[i].equals("-posy")) {
+ i++;
+ posy = MiscUtils.atoi(args[i], posy);
} else if(args[i].equals("-autoSwap")) {
i++;
useAutoSwap = MiscUtils.atob(args[i], useAutoSwap);
@@ -173,41 +199,53 @@ public class StereoDemo01 {
movieURI = null;
}
final StereoDemo01 demo01 = new StereoDemo01();
- demo01.doIt(0, upstream, movieSimple, movieURI, biLinear, numSamples, useSingleFBO, useVignette, useChromatic, useTimewarp,
+ demo01.doIt(deviceType, deviceIndex, posx, posy,
+ upstream, movieSimple, movieURI, biLinear, numSamples, useSingleFBO,
+ useRecommendedDistortionBits, useVignette, useChromatic, useTimewarp,
useAutoSwap, true /* useAnimator */, false /* exclusiveContext*/);
}
- public void doIt(final int stereoDeviceIndex,
+ public void doIt(final StereoDeviceFactory.DeviceType deviceType, final int deviceIndex, final int posx, final int posy,
final StereoGLEventListener upstream, final MovieSBSStereo movieSimple, final URI movieURI,
final boolean biLinear, final int numSamples, final boolean useSingleFBO,
- final boolean useVignette, final boolean useChromatic, final boolean useTimewarp,
+ final boolean useRecommendedDistortionBits, final boolean useVignette, final boolean useChromatic, final boolean useTimewarp,
final boolean useAutoSwap, final boolean useAnimator, final boolean exclusiveContext) throws InterruptedException {
System.err.println("glob duration "+duration);
System.err.println("glob useStereoScreen "+useStereoScreen);
+ System.err.println("deviceType "+deviceType);
+ System.err.println("deviceIndex "+deviceIndex);
System.err.println("biLinear "+biLinear);
System.err.println("numSamples "+numSamples);
System.err.println("useSingleFBO "+useSingleFBO);
+ System.err.println("useRecommendedDistortionBits "+useRecommendedDistortionBits);
System.err.println("useVignette "+useVignette);
System.err.println("useChromatic "+useChromatic);
System.err.println("useTimewarp "+useTimewarp);
System.err.println("useAutoSwap "+useAutoSwap);
- final StereoDeviceFactory stereoDeviceFactory = StereoDeviceFactory.createDefaultFactory();
+ final StereoDeviceFactory stereoDeviceFactory = StereoDeviceFactory.createFactory(deviceType);
if( null == stereoDeviceFactory ) {
System.err.println("No StereoDeviceFactory available");
return;
}
- final StereoDevice stereoDevice = stereoDeviceFactory.createDevice(stereoDeviceIndex, true /* verbose */);
+ final StereoDevice stereoDevice = stereoDeviceFactory.createDevice(deviceIndex, null, true /* verbose */);
if( null == stereoDevice ) {
- System.err.println("No StereoDevice.Context available for index "+stereoDeviceIndex);
+ System.err.println("No StereoDevice.Context available for index "+deviceIndex);
return;
}
+ final boolean isGenericDevice = stereoDevice instanceof GenericStereoDevice;
+
+ if( 0 <= posx && 0 <= posy && isGenericDevice ) {
+ ((GenericStereoDevice)stereoDevice).setSurfacePosition(posx, posy);
+ }
+ System.err.println("StereoDevice "+stereoDevice);
+
        // Start the sensor which provides the Rift's pose and motion.
if( !stereoDevice.startSensors(true) ) {
- System.err.println("Could not start sensors on device "+stereoDeviceIndex);
+ System.err.println("Could not start sensors on device "+deviceIndex);
}
//
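Taken together, the device-selection path introduced above reduces to the following minimal sketch. The class name and the concrete argument values are illustrative only; the factory, device and sensor calls are those used in the hunks above.

    import jogamp.opengl.util.stereo.GenericStereoDevice;

    import com.jogamp.opengl.util.stereo.StereoDevice;
    import com.jogamp.opengl.util.stereo.StereoDeviceFactory;

    /** Minimal, illustrative device-selection sketch; not part of the demo. */
    public class StereoDeviceSelectSketch {
        public static void main(final String[] args) {
            // Resolve a factory for the requested device type; DeviceType.Default
            // is assumed to select whichever implementation is available.
            final StereoDeviceFactory factory =
                    StereoDeviceFactory.createFactory(StereoDeviceFactory.DeviceType.Default);
            if( null == factory ) {
                System.err.println("No StereoDeviceFactory available");
                return;
            }
            // The second argument is the optional device configuration; passing
            // null (as the demo does) leaves the choice to the device index.
            final StereoDevice device = factory.createDevice(0 /* deviceIndex */, null, true /* verbose */);
            if( null == device ) {
                System.err.println("No StereoDevice available for index 0");
                return;
            }
            // Only the generic device exposes an explicit surface position.
            if( device instanceof GenericStereoDevice ) {
                ((GenericStereoDevice)device).setSurfacePosition(0, 0);
            }
            // Start the head-pose/motion sensors, if the device provides them.
            if( !device.startSensors(true) ) {
                System.err.println("Could not start sensors");
            }
        }
    }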
@@ -232,24 +270,35 @@ public class StereoDemo01 {
}
//
- // Oculus Rift setup
+ // Stereo Device Setup
//
// EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);
final FovHVHalves[] defaultEyeFov = stereoDevice.getDefaultFOV();
System.err.println("Default Fov[0]: "+defaultEyeFov[0]);
System.err.println("Default Fov[0]: "+defaultEyeFov[0].toStringInDegrees());
- System.err.println("Default Fov[1]: "+defaultEyeFov[1]);
- System.err.println("Default Fov[1]: "+defaultEyeFov[1].toStringInDegrees());
+ if( defaultEyeFov.length > 1 ) {
+ System.err.println("Default Fov[1]: "+defaultEyeFov[1]);
+ System.err.println("Default Fov[1]: "+defaultEyeFov[1].toStringInDegrees());
+ }
+
+ final float[] eyePositionOffset = null == movieSimple || isGenericDevice ? stereoDevice.getDefaultEyePositionOffset() // default
+ : new float[] { 0f, 0.3f, 0f }; // better fixed movie position
+ System.err.println("Eye Position Offset: "+Arrays.toString(eyePositionOffset));
- final float[] eyePositionOffset = null == movieSimple ? StereoDevice.DEFAULT_EYE_POSITION_OFFSET // default
- : new float[] { 0f, 0.3f, 0f }; // better fixed movie position
final int textureUnit = 0;
- final int distortionBits = ( useVignette ? StereoDeviceRenderer.DISTORTION_VIGNETTE : 0 ) |
- ( useChromatic ? StereoDeviceRenderer.DISTORTION_CHROMATIC : 0 ) |
- ( useTimewarp ? StereoDeviceRenderer.DISTORTION_TIMEWARP : 0 );
+ final int reqDistortionBits;
+ if( useRecommendedDistortionBits ) {
+ reqDistortionBits = stereoDevice.getRecommendedDistortionBits();
+ } else {
+ reqDistortionBits = ( useVignette ? StereoDeviceRenderer.DISTORTION_VIGNETTE : 0 ) |
+ ( useChromatic ? StereoDeviceRenderer.DISTORTION_CHROMATIC : 0 ) |
+ ( useTimewarp ? StereoDeviceRenderer.DISTORTION_TIMEWARP : 0 );
+ }
+ System.err.println("Requesting Distortion Bits: "+StereoUtil.distortionBitsToString(reqDistortionBits));
+
final float pixelsPerDisplayPixel = 1f;
final StereoDeviceRenderer stereoDeviceRenderer =
- stereoDevice.createRenderer(distortionBits, useSingleFBO ? 1 : 2, eyePositionOffset,
+ stereoDevice.createRenderer(reqDistortionBits, useSingleFBO ? 1 : 2, eyePositionOffset,
defaultEyeFov, pixelsPerDisplayPixel, textureUnit);
System.err.println("StereoDeviceRenderer: "+stereoDeviceRenderer);