author     Kenneth Russel <[email protected]>  2009-06-15 23:12:27 +0000
committer  Kenneth Russel <[email protected]>  2009-06-15 23:12:27 +0000
commit     41cd6c47b23975098cd155517790e018670785e7 (patch)
tree       247333528ad674d427ba96b1e05810f7961d609e /src/demos/hdr
parent     935d2596c13371bb745d921dbcb9f05b0c11a010 (diff)
Copied JOGL_2_SANDBOX r350 on to trunk; JOGL_2_SANDBOX branch is now closed
git-svn-id: file:///usr/local/projects/SUN/JOGL/git-svn/../svn-server-sync/jogl-demos/trunk@352 3298f667-5e0e-4b4a-8ed4-a3559d26a5f4
Diffstat (limited to 'src/demos/hdr')
-rwxr-xr-x  src/demos/hdr/ARBFPPipeline.java                    176
-rwxr-xr-x  src/demos/hdr/CgPipeline.java                       113
-rwxr-xr-x  src/demos/hdr/HDR.java                             1283
-rwxr-xr-x  src/demos/hdr/HDRTexture.java                       495
-rwxr-xr-x  src/demos/hdr/Pipeline.java                          29
-rwxr-xr-x  src/demos/hdr/RGBE.java                             452
-rwxr-xr-x  src/demos/hdr/readme.txt                              3
-rwxr-xr-x  src/demos/hdr/shaders/arbfp1/object.arbfp1           41
-rwxr-xr-x  src/demos/hdr/shaders/arbfp1/object_hilo.arbfp1      45
-rwxr-xr-x  src/demos/hdr/shaders/arbfp1/object_vp.arbvp1        42
-rwxr-xr-x  src/demos/hdr/shaders/arbfp1/recompile.txt            7
-rwxr-xr-x  src/demos/hdr/shaders/arbfp1/shrink.arbfp1           34
-rwxr-xr-x  src/demos/hdr/shaders/arbfp1/skybox.arbfp1           22
-rwxr-xr-x  src/demos/hdr/shaders/arbfp1/skybox_hilo.arbfp1      27
-rwxr-xr-x  src/demos/hdr/shaders/arbfp1/tonemap.arbfp1          64
-rwxr-xr-x  src/demos/hdr/shaders/cg/hdr.cg                     130
-rwxr-xr-x  src/demos/hdr/shaders/cg/object.cg                   30
-rwxr-xr-x  src/demos/hdr/shaders/cg/object_hilo.cg              31
-rwxr-xr-x  src/demos/hdr/shaders/cg/object_vp.cg                43
-rwxr-xr-x  src/demos/hdr/shaders/cg/shrink.cg                   17
-rwxr-xr-x  src/demos/hdr/shaders/cg/skybox.cg                    8
-rwxr-xr-x  src/demos/hdr/shaders/cg/skybox_hilo.cg               9
-rwxr-xr-x  src/demos/hdr/shaders/cg/tonemap.cg                  37
23 files changed, 3138 insertions, 0 deletions
diff --git a/src/demos/hdr/ARBFPPipeline.java b/src/demos/hdr/ARBFPPipeline.java
new file mode 100755
index 0000000..04ae00b
--- /dev/null
+++ b/src/demos/hdr/ARBFPPipeline.java
@@ -0,0 +1,176 @@
+package demos.hdr;
+
+import demos.util.FileUtils;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import javax.media.opengl.GL2ES1;
+import javax.media.opengl.GL2;
+import javax.media.opengl.GLException;
+
+
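+/**
+ * Pipeline implementation built on the ARB_vertex_program and
+ * ARB_fragment_program extensions; shaders are loaded from the classpath
+ * as ARB assembly source and addressed through numeric local parameters.
+ */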
+public class ARBFPPipeline implements Pipeline {
+
+ private int textureFormat;
+
+ public ARBFPPipeline(int textureFormat) {
+ this.textureFormat = textureFormat;
+ }
+
+ public void init() {
+ }
+
+ public void initFloatingPointTexture(GL2 gl, int textureObject, int w, int h) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, textureObject);
+ gl.glCopyTexImage2D(GL2.GL_TEXTURE_RECTANGLE_ARB, 0, textureFormat, 0, 0, w, h, 0);
+ }
+
+ public void initTexture(GL2 gl, int textureObject, int w, int h) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, textureObject);
+ gl.glCopyTexImage2D(GL2.GL_TEXTURE_RECTANGLE_ARB, 0, GL2.GL_RGBA, 0, 0, w, h, 0);
+ }
+
+ public void copyToTexture(GL2 gl, int textureObject, int w, int h) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, textureObject);
+ gl.glCopyTexSubImage2D(GL2.GL_TEXTURE_RECTANGLE_ARB, 0, 0, 0, 0, 0, w, h);
+ }
+
+ public void bindTexture(GL2 gl, int textureObject) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, textureObject);
+ }
+
+ private List programs = new ArrayList();
+ public int loadVertexProgram(GL2 gl, String filename) throws IOException {
+ return loadProgram(gl, filename, GL2.GL_VERTEX_PROGRAM_ARB);
+ }
+
+ public int loadFragmentProgram(GL2 gl, String filename) throws IOException {
+ return loadProgram(gl, filename, GL2.GL_FRAGMENT_PROGRAM_ARB);
+ }
+
+ private int loadProgram(GL2 gl, String fileName, int profile) throws IOException {
+ String programBuffer = FileUtils.loadStreamIntoString(getClass().getClassLoader().getResourceAsStream(fileName));
+ int[] tmpInt = new int[1];
+ gl.glGenProgramsARB(1, tmpInt, 0);
+ int res = tmpInt[0];
+ gl.glBindProgramARB(profile, res);
+ gl.glProgramStringARB(profile, GL2.GL_PROGRAM_FORMAT_ASCII_ARB, programBuffer.length(), programBuffer);
+ int[] errPos = new int[1];
+ gl.glGetIntegerv(GL2.GL_PROGRAM_ERROR_POSITION_ARB, errPos, 0);
+ if (errPos[0] >= 0) {
+ String kind = "Program";
+ if (profile == GL2.GL_VERTEX_PROGRAM_ARB) {
+ kind = "Vertex program";
+ } else if (profile == GL2.GL_FRAGMENT_PROGRAM_ARB) {
+ kind = "Fragment program";
+ }
+ System.out.println(kind + " failed to load:");
+ String errMsg = gl.glGetString(GL2.GL_PROGRAM_ERROR_STRING_ARB);
+ if (errMsg == null) {
+ System.out.println("[No error message available]");
+ } else {
+ System.out.println("Error message: \"" + errMsg + "\"");
+ }
+ System.out.println("Error occurred at position " + errPos[0] + " in program:");
+ int endPos = errPos[0];
+ while (endPos < programBuffer.length() && programBuffer.charAt(endPos) != '\n') {
+ ++endPos;
+ }
+ System.out.println(programBuffer.substring(errPos[0], endPos));
+ throw new GLException("Error loading " + kind);
+ } else {
+ if (profile == GL2.GL_FRAGMENT_PROGRAM_ARB) {
+ int[] isNative = new int[1];
+ gl.glGetProgramiv(GL2.GL_FRAGMENT_PROGRAM_ARB,
+ GL2.GL_PROGRAM_UNDER_NATIVE_LIMITS_ARB,
+ isNative, 0);
+ if (isNative[0] != 1) {
+ System.out.println("WARNING: fragment program is over native resource limits");
+ Thread.dumpStack();
+ }
+ }
+ }
+ return res;
+ }
+
+ public void enableVertexProgram(GL2 gl, int program) {
+ gl.glBindProgramARB(GL2.GL_VERTEX_PROGRAM_ARB, program);
+ gl.glEnable(GL2.GL_VERTEX_PROGRAM_ARB);
+ }
+
+ public void enableFragmentProgram(GL2 gl, int program) {
+ gl.glBindProgramARB(GL2.GL_FRAGMENT_PROGRAM_ARB, program);
+ gl.glEnable(GL2.GL_FRAGMENT_PROGRAM_ARB);
+ }
+
+ public void disableVertexProgram(GL2 gl) {
+ gl.glDisable(GL2.GL_VERTEX_PROGRAM_ARB);
+ }
+
+ public void disableFragmentProgram(GL2 gl) {
+ gl.glDisable(GL2.GL_FRAGMENT_PROGRAM_ARB);
+ }
+
+ public int getNamedParameter(int program, String name) {
+ throw new RuntimeException("Not supported");
+ }
+
+ public void setVertexProgramParameter1f(GL2 gl, int param, float val) {
+ if (param < 0) return;
+ gl.glProgramLocalParameter4fARB(GL2.GL_VERTEX_PROGRAM_ARB, param, val, 0, 0, 0);
+ }
+
+ public void setVertexProgramParameter3f(GL2 gl, int param, float x, float y, float z) {
+ if (param < 0) return;
+ gl.glProgramLocalParameter4fARB(GL2.GL_VERTEX_PROGRAM_ARB, param, x, y, z, 0);
+ }
+
+ public void setVertexProgramParameter4f(GL2 gl, int param, float x, float y, float z, float w) {
+ if (param < 0) return;
+ gl.glProgramLocalParameter4fARB(GL2.GL_VERTEX_PROGRAM_ARB, param, x, y, z, w);
+ }
+
+ public void setFragmentProgramParameter1f(GL2 gl, int param, float val) {
+ if (param < 0) return;
+ gl.glProgramLocalParameter4fARB(GL2.GL_FRAGMENT_PROGRAM_ARB, param, val, 0, 0, 0);
+ }
+
+ public void setFragmentProgramParameter3f(GL2 gl, int param, float x, float y, float z) {
+ if (param < 0) return;
+ gl.glProgramLocalParameter4fARB(GL2.GL_FRAGMENT_PROGRAM_ARB, param, x, y, z, 0);
+ }
+
+ public void setFragmentProgramParameter4f(GL2 gl, int param, float x, float y, float z, float w) {
+ if (param < 0) return;
+ gl.glProgramLocalParameter4fARB(GL2.GL_FRAGMENT_PROGRAM_ARB, param, x, y, z, w);
+ }
+
+ public void trackModelViewProjectionMatrix(GL2 gl, int param) {
+ float[] modelView = new float[16];
+ float[] projection = new float[16];
+ float[] mvp = new float[16];
+
+ // Get matrices
+ gl.glGetFloatv(GL2.GL_PROJECTION_MATRIX, projection, 0);
+ gl.glGetFloatv(GL2.GL_MODELVIEW_MATRIX, modelView, 0);
+ // Multiply together
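+ // Both arrays are in OpenGL's column-major layout, so this loop computes
+ // projection * modelview (the combined MVP) and leaves mvp in the same
+ // column-major layout as the inputs.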
+ for (int i = 0; i < 4; i++) {
+ for (int j = 0; j < 4; j++) {
+ float sum = 0;
+ for (int k = 0; k < 4; k++) {
+ sum += modelView[4 * i + k] * projection[4 * k + j];
+ }
+ mvp[4 * i + j] = sum;
+ }
+ }
+
+ setMatrixParameterfc(gl, param, mvp);
+ }
+
+ public void setMatrixParameterfc(GL2 gl, int param, float[] matrix) {
+ // Correct for row-major vs. column-major differences
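+ // The incoming matrix is in OpenGL's column-major layout; each
+ // glProgramLocalParameter4fARB call below loads one row of it
+ // (matrix[i], matrix[4+i], matrix[8+i], matrix[12+i]) into local
+ // parameter param + i, i.e. the array is transposed as it is loaded.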
+ for (int i = 0; i < 4; i++) {
+ gl.glProgramLocalParameter4fARB(GL2.GL_VERTEX_PROGRAM_ARB, param + i, matrix[i], matrix[4+i], matrix[8+i], matrix[12+i]);
+ }
+ }
+}
diff --git a/src/demos/hdr/CgPipeline.java b/src/demos/hdr/CgPipeline.java
new file mode 100755
index 0000000..56906f1
--- /dev/null
+++ b/src/demos/hdr/CgPipeline.java
@@ -0,0 +1,113 @@
+package demos.hdr;
+
+import java.io.*;
+import java.util.*;
+
+import javax.media.opengl.*;
+import com.sun.opengl.cg.*;
+import demos.util.*;
+
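+/**
+ * Pipeline implementation that delegates to the NVIDIA Cg runtime
+ * (com.sun.opengl.cg); compiled programs and named parameters are kept in
+ * lists and referenced by integer handles through the Pipeline interface.
+ */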
+public class CgPipeline implements Pipeline {
+ private CGcontext context;
+ public void init() {
+ context = CgGL.cgCreateContext();
+ }
+
+ public void initFloatingPointTexture(GL2 gl, int textureObject, int w, int h) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, textureObject);
+ gl.glCopyTexImage2D(GL2.GL_TEXTURE_RECTANGLE_ARB, 0, GL2.GL_FLOAT_RGBA16_NV, 0, 0, w, h, 0);
+ }
+
+ public void initTexture(GL2 gl, int textureObject, int w, int h) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, textureObject);
+ gl.glCopyTexImage2D(GL2.GL_TEXTURE_RECTANGLE_ARB, 0, GL.GL_RGBA, 0, 0, w, h, 0);
+ }
+
+ public void copyToTexture(GL2 gl, int textureObject, int w, int h) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, textureObject);
+ gl.glCopyTexSubImage2D(GL2.GL_TEXTURE_RECTANGLE_ARB, 0, 0, 0, 0, 0, w, h);
+ }
+
+ public void bindTexture(GL2 gl, int textureObject) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, textureObject);
+ }
+
+ private List programs = new ArrayList();
+ public int loadVertexProgram(GL2 gl, String filename) throws IOException {
+ return loadProgram(filename, CgGL.CG_PROFILE_ARBVP1);
+ }
+
+ public int loadFragmentProgram(GL2 gl, String filename) throws IOException {
+ return loadProgram(filename, CgGL.CG_PROFILE_ARBFP1);
+ }
+
+ private int loadProgram(String fileName, int profile) throws IOException {
+ CGprogram prog = CgGL.cgCreateProgramFromFile(context, CgGL.CG_SOURCE, fileName, profile, null, null);
+ if (prog == null) {
+ throw new RuntimeException("Error loading program");
+ }
+ CgGL.cgGLLoadProgram(prog);
+ int res = programs.size();
+ programs.add(prog);
+ return res;
+ }
+
+ public void enableVertexProgram(GL2 gl, int program) {
+ CgGL.cgGLBindProgram((CGprogram) programs.get(program));
+ CgGL.cgGLEnableProfile(CgGL.CG_PROFILE_ARBVP1);
+ }
+
+ public void enableFragmentProgram(GL2 gl, int program) {
+ CgGL.cgGLBindProgram((CGprogram) programs.get(program));
+ CgGL.cgGLEnableProfile(CgGL.CG_PROFILE_ARBFP1);
+ }
+
+ public void disableVertexProgram(GL2 gl) {
+ CgGL.cgGLDisableProfile(CgGL.CG_PROFILE_ARBVP1);
+ }
+
+ public void disableFragmentProgram(GL2 gl) {
+ CgGL.cgGLDisableProfile(CgGL.CG_PROFILE_ARBFP1);
+ }
+
+ private List parameters = new ArrayList();
+ public int getNamedParameter(int program, String name) {
+ CGprogram prog = (CGprogram) programs.get(program);
+ CGparameter param = CgGL.cgGetNamedParameter(prog, name);
+ int res = parameters.size();
+ parameters.add(param);
+ return res;
+ }
+
+ public void setVertexProgramParameter1f(GL2 gl, int param, float val) {
+ CgGL.cgGLSetParameter1f((CGparameter) parameters.get(param), val);
+ }
+
+ public void setVertexProgramParameter3f(GL2 gl, int param, float x, float y, float z) {
+ CgGL.cgGLSetParameter3f((CGparameter) parameters.get(param), x, y, z);
+ }
+
+ public void setVertexProgramParameter4f(GL2 gl, int param, float x, float y, float z, float w) {
+ CgGL.cgGLSetParameter4f((CGparameter) parameters.get(param), x, y, z, w);
+ }
+
+ public void setFragmentProgramParameter1f(GL2 gl, int param, float val) {
+ CgGL.cgGLSetParameter1f((CGparameter) parameters.get(param), val);
+ }
+
+ public void setFragmentProgramParameter3f(GL2 gl, int param, float x, float y, float z) {
+ CgGL.cgGLSetParameter3f((CGparameter) parameters.get(param), x, y, z);
+ }
+
+ public void setFragmentProgramParameter4f(GL2 gl, int param, float x, float y, float z, float w) {
+ CgGL.cgGLSetParameter4f((CGparameter) parameters.get(param), x, y, z, w);
+ }
+
+ public void trackModelViewProjectionMatrix(GL2 gl, int param) {
+ CgGL.cgGLSetStateMatrixParameter((CGparameter) parameters.get(param), CgGL.CG_GL_MODELVIEW_PROJECTION_MATRIX, CgGL.CG_GL_MATRIX_IDENTITY);
+ }
+
+ public void setMatrixParameterfc(GL2 gl, int param, float[] matrix) {
+ CgGL.cgGLSetMatrixParameterfc((CGparameter) parameters.get(param), matrix, 0);
+ }
+}
diff --git a/src/demos/hdr/HDR.java b/src/demos/hdr/HDR.java
new file mode 100755
index 0000000..2f71c3c
--- /dev/null
+++ b/src/demos/hdr/HDR.java
@@ -0,0 +1,1283 @@
+package demos.hdr;
+
+import com.sun.opengl.util.Animator;
+import com.sun.opengl.util.gl2.GLUT;
+import demos.common.Demo;
+import demos.common.DemoListener;
+import demos.util.DurationTimer;
+import demos.util.ObjReader;
+import demos.util.SystemTime;
+import demos.util.Time;
+import gleem.BSphere;
+import gleem.BSphereProvider;
+import gleem.CameraParameters;
+import gleem.ExaminerViewer;
+import gleem.ManipManager;
+import gleem.MouseButtonHelper;
+import gleem.linalg.Mat4f;
+import gleem.linalg.Rotf;
+import gleem.linalg.Vec3f;
+import java.awt.BorderLayout;
+import java.awt.Frame;
+import java.awt.event.KeyAdapter;
+import java.awt.event.KeyEvent;
+import java.awt.event.WindowAdapter;
+import java.awt.event.WindowEvent;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.FloatBuffer;
+import java.nio.IntBuffer;
+import javax.media.opengl.GL;
+import javax.media.opengl.GL2ES1;
+import javax.media.opengl.GL2;
+import javax.media.opengl.GLAutoDrawable;
+import javax.media.opengl.GLCapabilities;
+import javax.media.opengl.GLContext;
+import javax.media.opengl.GLDrawableFactory;
+import javax.media.opengl.GLEventListener;
+import javax.media.opengl.GLException;
+import javax.media.opengl.GLPbuffer;
+import javax.media.opengl.GLProfile;
+import javax.media.opengl.awt.AWTGLAutoDrawable;
+import javax.media.opengl.awt.GLCanvas;
+import javax.media.opengl.glu.GLU;
+import javax.swing.JOptionPane;
+
+
+/** HDR demo by NVidia Corporation - Simon Green, [email protected] <P>
+
+ Ported to Java by Kenneth Russell
+*/
+
+public class HDR extends Demo {
+ private static String[] defaultArgs = {
+ "demos/data/images/stpeters_cross.hdr",
+ "512",
+ "384",
+ "2",
+ "7",
+ "3",
+ "demos/data/models/teapot.obj"
+ };
+ private GLAutoDrawable drawable;
+ private boolean useCg;
+ private boolean initComplete;
+ private HDRTexture hdr;
+ private String modelFilename;
+ private ObjReader model;
+ private Pipeline pipeline;
+
+ private GLUT glut = new GLUT();
+
+ private boolean[] b = new boolean[256];
+
+ private ExaminerViewer viewer;
+ private boolean doViewAll = true;
+
+ private DurationTimer timer = new DurationTimer();
+ private boolean firstRender = true;
+ private int frameCount;
+
+ private Time time = new SystemTime();
+ private float animRate = (float) Math.toRadians(-12.0f); // Radians / sec
+
+ private String hdrFilename;
+ private int win_w;
+ private int win_h;
+ private float win_scale;
+ private int pbuffer_w;
+ private int pbuffer_h;
+ private int blurWidth;
+ private int blur_scale;
+ private int blur_w;
+ private int blur_h;
+ private float blurAmount = 0.5f;
+
+ private int modelno = 4;
+ private int numModels = 5;
+
+ private boolean hilo = false;
+ private int hdr_tex;
+ private int hdr_tex2;
+ private int gamma_tex;
+ private int vignette_tex;
+
+ private GLPbuffer pbuffer;
+ private GLPbuffer blur_pbuffer;
+ private GLPbuffer blur2_pbuffer;
+ private GLPbuffer tonemap_pbuffer;
+ // Texture objects for these pbuffers
+ private int pbuffer_tex;
+ private int blur_pbuffer_tex;
+ private int blur2_pbuffer_tex;
+ private int tonemap_pbuffer_tex;
+
+ // Render passes for blur2_pbuffer
+ private static final int BLUR2_SHRINK_PASS = 0;
+ private static final int BLUR2_VERT_BLUR_PASS = 1;
+ private int blur2Pass;
+
+ private int blurh_fprog, blurv_fprog;
+ private int skybox_fprog, object_fprog, object_vprog;
+ private int tonemap_fprog, shrink_fprog;
+ private int blurAmount_param, windowSize_param, exposure_param;
+ private int modelViewProj_param, model_param, eyePos_param;
+
+
+ private float exposure = 32.0f;
+
+ private float[] identityMatrix = { 1.0f, 0.0f, 0.0f, 0.0f,
+ 0.0f, 1.0f, 0.0f, 0.0f,
+ 0.0f, 0.0f, 1.0f, 0.0f,
+ 0.0f, 0.0f, 0.0f, 1.0f };
+
+ public static void main(String[] args) {
+
+ GLCanvas canvas = new GLCanvas();
+ final HDR demo = new HDR();
+
+ canvas.addGLEventListener(demo);
+
+ canvas.addKeyListener(new KeyAdapter() {
+ public void keyPressed(KeyEvent e) {
+ demo.dispatchKey(e.getKeyCode(), e.getKeyChar());
+ }
+ });
+
+ final Animator animator = new Animator(canvas);
+ demo.setDemoListener(new DemoListener() {
+ public void shutdownDemo() {
+ runExit(animator);
+ }
+ public void repaint() {}
+ });
+ demo.setup(args);
+
+ Frame frame = new Frame("High Dynamic Range Rendering Demo");
+ frame.setLayout(new BorderLayout());
+ canvas.setSize(demo.getPreferredWidth(), demo.getPreferredHeight());
+
+ frame.add(canvas, BorderLayout.CENTER);
+ frame.pack();
+ frame.setVisible(true);
+ canvas.requestFocus();
+
+ frame.addWindowListener(new WindowAdapter() {
+ public void windowClosing(WindowEvent e) {
+ runExit(animator);
+ }
+ });
+
+ animator.start();
+ }
+
+ public void setup(String[] args) {
+ if ((args == null) || (args.length == 0)) {
+ args = defaultArgs;
+ }
+
+ if (args.length < 6 || args.length > 8) {
+ usage();
+ }
+
+ try {
+ int argNo = 0;
+ if (args[argNo].equals("-cg")) {
+ useCg = true;
+ ++argNo;
+ }
+ hdrFilename = args[argNo++];
+ pbuffer_w = Integer.parseInt(args[argNo++]);
+ pbuffer_h = Integer.parseInt(args[argNo++]);
+ win_scale = Float.parseFloat(args[argNo++]);
+ blurWidth = Integer.parseInt(args[argNo++]);
+ blur_scale = Integer.parseInt(args[argNo++]);
+ if (argNo < args.length) {
+ modelFilename = args[argNo++];
+ }
+
+ blur_w = pbuffer_w / blur_scale;
+ blur_h = pbuffer_h / blur_scale;
+ win_w = (int) (pbuffer_w * win_scale);
+ win_h = (int) (pbuffer_h * win_scale);
+ } catch (NumberFormatException e) {
+ e.printStackTrace();
+ usage();
+ }
+
+ if (modelFilename != null) {
+ try {
+ InputStream in = getClass().getClassLoader().getResourceAsStream(modelFilename);
+ if (in == null) {
+ throw new IOException("Unable to open model file " + modelFilename);
+ }
+ model = new ObjReader(in);
+ if (model.getVerticesPerFace() != 3) {
+ throw new IOException("Sorry, only triangle-based Wavefront OBJ files are supported");
+ }
+ model.rescale(1.2f / model.getRadius());
+ ++numModels;
+ modelno = 5;
+ } catch (IOException e) {
+ e.printStackTrace();
+ System.exit(1);
+ }
+ }
+
+ b['f'] = true; // fragment programs
+ b['g'] = true; // glare
+ b['l'] = true; // linear texture filtering
+ b[' '] = true; // animation
+ b['n'] = true; // upsampling smoothing
+
+ try {
+ InputStream in = getClass().getClassLoader().getResourceAsStream(hdrFilename);
+ if (in == null) {
+ throw new IOException("Unable to open HDR file " + hdrFilename);
+ }
+ hdr = new HDRTexture(in);
+ hdr.analyze();
+ hdr.convert();
+ } catch (IOException e) {
+ e.printStackTrace();
+ System.exit(0);
+ }
+
+ }
+
+ public int getPreferredWidth() {
+ return win_w;
+ }
+
+ public int getPreferredHeight() {
+ return win_h;
+ }
+
+ //----------------------------------------------------------------------
+ // Internals only below this point
+ //
+
+ public void shutdownDemo() {
+ ManipManager.getManipManager().unregisterWindow((AWTGLAutoDrawable) drawable);
+ drawable.removeGLEventListener(this);
+ super.shutdownDemo();
+ }
+
+ //----------------------------------------------------------------------
+ // Listener for main window
+ //
+
+ private float zNear = 0.1f;
+ private float zFar = 10.0f;
+ private boolean wire = false;
+ private boolean toggleWire = false;
+ private GLU glu = new GLU();
+
+ public void init(GLAutoDrawable drawable) {
+ initComplete = false;
+ // printThreadName("init for Listener");
+
+ GL2 gl = drawable.getGL().getGL2();
+
+ checkExtension(gl, "GL_VERSION_1_3"); // For multitexture
+ checkExtension(gl, "GL_ARB_pbuffer");
+ checkExtension(gl, "GL_ARB_vertex_program");
+ checkExtension(gl, "GL_ARB_fragment_program");
+ if (!gl.isExtensionAvailable("GL_ARB_texture_rectangle") &&
+ !gl.isExtensionAvailable("GL_EXT_texture_rectangle")) {
+ // NOTE: it turns out the constants associated with these extensions are identical
+ unavailableExtension("Texture rectangle extension not available (need either GL_ARB_texture_rectangle or GL_EXT_texture_rectangle)");
+ }
+
+ if (!gl.isExtensionAvailable("GL_NV_float_buffer") &&
+ !gl.isExtensionAvailable("GL_ATI_texture_float") &&
+ !gl.isExtensionAvailable("GL_APPLE_float_pixels")) {
+ unavailableExtension("Floating-point textures not available (need one of GL_NV_float_buffer, GL_ATI_texture_float, or GL_APPLE_float_pixels)");
+ }
+
+ setOrthoProjection(gl, 0, 0, win_w, win_h);
+
+ gamma_tex = createGammaTexture(gl, 1024, 1.0f / 2.2f);
+ vignette_tex = createVignetteTexture(gl, pbuffer_w, pbuffer_h, 0.25f*pbuffer_w, 0.7f*pbuffer_w);
+
+ int floatBits = 16;
+ int floatAlphaBits = 0;
+ // int floatDepthBits = 16;
+ // Workaround for apparent bug when not using render-to-texture-rectangle
+ int floatDepthBits = 1;
+
+ GLCapabilities caps = new GLCapabilities(null);
+ caps.setDoubleBuffered(false);
+ caps.setPbufferFloatingPointBuffers(true);
+ caps.setRedBits(floatBits);
+ caps.setGreenBits(floatBits);
+ caps.setBlueBits(floatBits);
+ caps.setAlphaBits(floatAlphaBits);
+ caps.setDepthBits(floatDepthBits);
+ int[] tmp = new int[1];
+ if (!GLDrawableFactory.getFactory(GLProfile.getDefault()).canCreateGLPbuffer()) {
+ unavailableExtension("Can not create pbuffer");
+ }
+ if (pbuffer != null) {
+ pbuffer.destroy();
+ pbuffer = null;
+ }
+ if (blur_pbuffer != null) {
+ blur_pbuffer.destroy();
+ blur_pbuffer = null;
+ }
+ if (blur2_pbuffer != null) {
+ blur2_pbuffer.destroy();
+ blur2_pbuffer = null;
+ }
+ if (tonemap_pbuffer != null) {
+ tonemap_pbuffer.destroy();
+ tonemap_pbuffer = null;
+ }
+
+ GLContext parentContext = drawable.getContext();
+ pbuffer = GLDrawableFactory.getFactory(GLProfile.getDefault()).createGLPbuffer(caps, null, pbuffer_w, pbuffer_h, parentContext);
+ pbuffer.addGLEventListener(new PbufferListener());
+ gl.glGenTextures(1, tmp, 0);
+ pbuffer_tex = tmp[0];
+ blur_pbuffer = GLDrawableFactory.getFactory(GLProfile.getDefault()).createGLPbuffer(caps, null, blur_w, blur_h, parentContext);
+ blur_pbuffer.addGLEventListener(new BlurPbufferListener());
+ gl.glGenTextures(1, tmp, 0);
+ blur_pbuffer_tex = tmp[0];
+ blur2_pbuffer = GLDrawableFactory.getFactory(GLProfile.getDefault()).createGLPbuffer(caps, null, blur_w, blur_h, parentContext);
+ blur2_pbuffer.addGLEventListener(new Blur2PbufferListener());
+ gl.glGenTextures(1, tmp, 0);
+ blur2_pbuffer_tex = tmp[0];
+ caps.setPbufferFloatingPointBuffers(false);
+ caps.setRedBits(8);
+ caps.setGreenBits(8);
+ caps.setBlueBits(8);
+ caps.setDepthBits(24);
+ tonemap_pbuffer = GLDrawableFactory.getFactory(GLProfile.getDefault()).createGLPbuffer(caps, null, pbuffer_w, pbuffer_h, parentContext);
+ tonemap_pbuffer.addGLEventListener(new TonemapPbufferListener());
+ gl.glGenTextures(1, tmp, 0);
+ tonemap_pbuffer_tex = tmp[0];
+
+ doViewAll = true;
+
+ // Register the window with the ManipManager
+ ManipManager manager = ManipManager.getManipManager();
+ manager.registerWindow((AWTGLAutoDrawable) drawable);
+ this.drawable = drawable;
+
+ viewer = new ExaminerViewer(MouseButtonHelper.numMouseButtons());
+ viewer.setAutoRedrawMode(false);
+ viewer.setNoAltKeyMode(true);
+ viewer.attach((AWTGLAutoDrawable) drawable, new BSphereProvider() {
+ public BSphere getBoundingSphere() {
+ return new BSphere(new Vec3f(0, 0, 0), 1.0f);
+ }
+ });
+ viewer.setZNear(zNear);
+ viewer.setZFar(zFar);
+ initComplete = true;
+ }
+
+ public void dispose(GLAutoDrawable drawable) {
+ }
+
+ public void display(GLAutoDrawable drawable) {
+ // printThreadName("display for Listener");
+
+ if (!initComplete) {
+ return;
+ }
+
+ if (!firstRender) {
+ if (++frameCount == 30) {
+ timer.stop();
+ System.err.println("Frames per second: " + (30.0f / timer.getDurationAsSeconds()));
+ timer.reset();
+ timer.start();
+ frameCount = 0;
+ }
+ } else {
+ firstRender = false;
+ timer.start();
+ }
+
+ time.update();
+
+ GL2 gl = drawable.getGL().getGL2();
+
+ // OK, ready to go
+ if (b[' ']) {
+ viewer.rotateAboutFocalPoint(new Rotf(Vec3f.Y_AXIS, (float) (time.deltaT() * animRate)));
+ }
+
+ pbuffer.display();
+
+ // FIXME: because of changes in lazy pbuffer instantiation
+ // behavior, the pbuffer might not have been rendered just now
+ if (pipeline == null) {
+ return;
+ }
+
+ // blur pass
+ if (b['g']) {
+ // shrink image
+ blur2Pass = BLUR2_SHRINK_PASS;
+ blur2_pbuffer.display();
+ }
+
+ // horizontal blur
+ blur_pbuffer.display();
+
+ // vertical blur
+ blur2Pass = BLUR2_VERT_BLUR_PASS;
+ blur2_pbuffer.display();
+
+ // tone mapping pass
+ tonemap_pbuffer.display();
+
+ // display in window
+ gl.glEnable(GL2.GL_TEXTURE_RECTANGLE_ARB);
+ gl.glActiveTexture(GL2.GL_TEXTURE0);
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, tonemap_pbuffer_tex);
+ if (b['n']) {
+ gl.glTexParameteri( GL2.GL_TEXTURE_RECTANGLE_ARB, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_LINEAR);
+ } else {
+ gl.glTexParameteri( GL2.GL_TEXTURE_RECTANGLE_ARB, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_NEAREST);
+ }
+ drawQuadRect4(gl, win_w, win_h, pbuffer_w, pbuffer_h);
+ gl.glDisable(GL2.GL_TEXTURE_RECTANGLE_ARB);
+
+ // Try to avoid swamping the CPU on Linux
+ Thread.yield();
+ }
+
+ public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
+ setOrthoProjection(drawable.getGL().getGL2(), x, y, width, height);
+ win_w = width;
+ win_h = height;
+ }
+
+ // Unused routines
+ public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) {}
+
+ private void checkExtension(GL gl, String glExtensionName) {
+ if (!gl.isExtensionAvailable(glExtensionName)) {
+ unavailableExtension("Unable to initialize " + glExtensionName + " OpenGL extension");
+ }
+ }
+
+ private void unavailableExtension(String message) {
+ JOptionPane.showMessageDialog(null, message, "Unavailable extension", JOptionPane.ERROR_MESSAGE);
+ shutdownDemo();
+ throw new GLException(message);
+ }
+
+ private void dispatchKey(int keyCode, char k) {
+ if (k < 256)
+ b[k] = !b[k];
+
+ switch (keyCode) {
+ case KeyEvent.VK_ESCAPE:
+ case KeyEvent.VK_Q:
+ shutdownDemo();
+ break;
+
+ case KeyEvent.VK_EQUALS:
+ exposure *= 2;
+ break;
+
+ case KeyEvent.VK_MINUS:
+ exposure *= 0.5f;
+ break;
+
+ case KeyEvent.VK_PLUS:
+ exposure += 1.0f;
+ break;
+
+ case KeyEvent.VK_UNDERSCORE:
+ exposure -= 1.0f;
+ break;
+
+ case KeyEvent.VK_PERIOD:
+ blurAmount += 0.1f;
+ break;
+
+ case KeyEvent.VK_COMMA:
+ blurAmount -= 0.1f;
+ break;
+
+ case KeyEvent.VK_G:
+ if (b['g'])
+ blurAmount = 0.5f;
+ else
+ blurAmount = 0.0f;
+ break;
+
+ case KeyEvent.VK_O:
+ modelno = (modelno + 1) % numModels;
+ break;
+
+ case KeyEvent.VK_V:
+ doViewAll = true;
+ break;
+ }
+ }
+
+ // create gamma lookup table texture
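+ // The 1D luminance texture stores pow(x, gamma) for x in [0, 1); it is
+ // created with gamma = 1/2.2 (display gamma) and sampled on texture unit 2
+ // by the tonemapping pass.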
+ private int createGammaTexture(GL2 gl, int size, float gamma) {
+ int[] tmp = new int[1];
+ gl.glGenTextures(1, tmp, 0);
+ int texid = tmp[0];
+
+ int target = GL2.GL_TEXTURE_1D;
+ gl.glBindTexture(target, texid);
+ gl.glTexParameteri(target, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_NEAREST);
+ gl.glTexParameteri(target, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_NEAREST);
+ gl.glTexParameteri(target, GL2.GL_TEXTURE_WRAP_S, GL2.GL_CLAMP_TO_EDGE);
+
+ gl.glPixelStorei(GL2.GL_UNPACK_ALIGNMENT, 1);
+
+ float[] img = new float [size];
+
+ for(int i=0; i<size; i++) {
+ float x = i / (float) size;
+ img[i] = (float) Math.pow(x, gamma);
+ }
+
+ gl.glTexImage1D(target, 0, GL2.GL_LUMINANCE, size, 0, GL2.GL_LUMINANCE, GL2.GL_FLOAT, FloatBuffer.wrap(img));
+
+ return texid;
+ }
+
+ // create vignette texture
+ // based on Debevec's pflare.c
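+ // Texel values fall from 1.0 inside radius r0 to 0.0 beyond r1, with a
+ // smooth transition in between; the tonemapping pass samples the result
+ // on texture unit 3.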
+ int createVignetteTexture(GL gl, int xsiz, int ysiz, float r0, float r1) {
+ int[] tmp = new int[1];
+ gl.glGenTextures(1, tmp, 0);
+ int texid = tmp[0];
+
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, texid);
+ gl.glTexParameteri(GL2.GL_TEXTURE_RECTANGLE_ARB, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_NEAREST);
+ gl.glTexParameteri(GL2.GL_TEXTURE_RECTANGLE_ARB, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_NEAREST);
+ gl.glTexParameteri(GL2.GL_TEXTURE_RECTANGLE_ARB, GL2.GL_TEXTURE_WRAP_S, GL2.GL_CLAMP_TO_EDGE);
+ gl.glTexParameteri(GL2.GL_TEXTURE_RECTANGLE_ARB, GL2.GL_TEXTURE_WRAP_T, GL2.GL_CLAMP_TO_EDGE);
+
+ gl.glPixelStorei(GL2.GL_UNPACK_ALIGNMENT, 1);
+
+ float[] img = new float [xsiz*ysiz];
+
+ for (int y = 0; y < ysiz; y++) {
+ for (int x = 0; x < xsiz; x++) {
+ float radius = (float) Math.sqrt((x-xsiz/2)*(x-xsiz/2) + (y-ysiz/2)*(y-ysiz/2));
+ if (radius > r0) {
+ if (radius < r1) {
+ float t = 1.0f - (radius-r0)/(r1-r0);
+ float a = t * 2 - 1;
+ float reduce = (float) ((0.25 * Math.PI + 0.5 * Math.asin(a) + 0.5 * a * Math.sqrt( 1 - a*a ))/(0.5 * Math.PI));
+ img[y*xsiz + x] = reduce;
+ } else {
+ img[y*xsiz + x] = 0.0f;
+ }
+ } else {
+ img[y*xsiz + x] = 1.0f;
+ }
+ }
+ }
+
+ gl.glTexImage2D(GL2.GL_TEXTURE_RECTANGLE_ARB, 0, GL2.GL_LUMINANCE, xsiz, ysiz, 0, GL2.GL_LUMINANCE, GL2.GL_FLOAT, FloatBuffer.wrap(img));
+
+ return texid;
+ }
+
+ //----------------------------------------------------------------------
+ // Listeners for pbuffers
+ //
+
+ class PbufferListener implements GLEventListener {
+ public void init(GLAutoDrawable drawable) {
+ // printThreadName("init for PbufferListener");
+
+ // drawable.setGL(new DebugGL(drawable.getGL()));
+
+ GL2 gl = drawable.getGL().getGL2();
+ gl.glEnable(GL2.GL_DEPTH_TEST);
+
+ // FIXME: what about the ExaminerViewer?
+ setPerspectiveProjection(gl, pbuffer_w, pbuffer_h);
+
+ GLPbuffer pbuffer = (GLPbuffer) drawable;
+ int fpmode = pbuffer.getFloatingPointMode();
+ int texmode = 0;
+ switch (fpmode) {
+ case GLPbuffer.NV_FLOAT:
+ System.err.println("Creating HILO cubemap");
+ hdr_tex = hdr.createCubemapHILO(gl, true);
+ hdr_tex2 = hdr.createCubemapHILO(gl, false);
+ texmode = GL2.GL_FLOAT_RGBA16_NV;
+ hilo = true;
+ break;
+ case GLPbuffer.APPLE_FLOAT:
+ System.err.println("Creating FLOAT16_APPLE cubemap");
+ hdr_tex = hdr.createCubemap(gl, GL2.GL_RGB_FLOAT16_APPLE);
+ texmode = GL2.GL_RGBA_FLOAT16_APPLE;
+ break;
+ case GLPbuffer.ATI_FLOAT:
+ System.err.println("Creating FLOAT16_ATI cubemap");
+ hdr_tex = hdr.createCubemap(gl, GL2.GL_RGB_FLOAT16_ATI);
+ texmode = GL2.GL_RGBA_FLOAT16_ATI;
+ break;
+ default:
+ throw new RuntimeException("Unexpected floating-point mode " + fpmode);
+ }
+
+ if (useCg) {
+ initCg(gl);
+ } else {
+ initARBFP(gl, texmode);
+ }
+ initBlurCode(gl, blurWidth);
+
+ pipeline.initFloatingPointTexture(gl, pbuffer_tex, pbuffer_w, pbuffer_h);
+ }
+
+ public void display(GLAutoDrawable drawable) {
+ // printThreadName("display for PbufferListener");
+
+ GL2 gl = drawable.getGL().getGL2();
+
+ renderScene(gl);
+
+ // Copy results back to texture
+ pipeline.copyToTexture(gl, pbuffer_tex, pbuffer_w, pbuffer_h);
+ }
+
+ // Unused routines
+ public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {}
+ public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) {}
+ public void dispose(GLAutoDrawable drawable) {}
+
+ //----------------------------------------------------------------------
+ // Internals only below this point
+ //
+
+ // render scene to float pbuffer
+ private void renderScene(GL2 gl) {
+ gl.glClear(GL2.GL_COLOR_BUFFER_BIT | GL2.GL_DEPTH_BUFFER_BIT);
+
+ if (doViewAll) {
+ viewer.viewAll(gl);
+ }
+
+ if (b['w'])
+ gl.glPolygonMode(GL2.GL_FRONT_AND_BACK, GL2.GL_LINE);
+ else
+ gl.glPolygonMode(GL2.GL_FRONT_AND_BACK, GL2.GL_FILL);
+
+ if (b['m']) {
+ gl.glEnable(GL2.GL_MULTISAMPLE);
+ gl.glHint(GL2.GL_MULTISAMPLE_FILTER_HINT_NV, GL2.GL_NICEST);
+ } else {
+ gl.glDisable(GL2.GL_MULTISAMPLE);
+ }
+
+ if (!b['e']) {
+ // draw background
+ pipeline.enableFragmentProgram(gl, skybox_fprog);
+ gl.glDisable(GL2.GL_DEPTH_TEST);
+ drawSkyBox(gl);
+ gl.glEnable(GL2.GL_DEPTH_TEST);
+ }
+
+ // draw object
+ pipeline.enableVertexProgram(gl, object_vprog);
+ pipeline.enableFragmentProgram(gl, object_fprog);
+
+ gl.glMatrixMode(GL2.GL_TEXTURE);
+ gl.glLoadIdentity();
+ viewer.update();
+ viewer.updateInverseRotation(gl);
+
+ gl.glMatrixMode( GL2.GL_MODELVIEW );
+ gl.glLoadIdentity();
+ CameraParameters params = viewer.getCameraParameters();
+ Mat4f view = params.getModelviewMatrix();
+ applyTransform(gl, view);
+
+ pipeline.trackModelViewProjectionMatrix(gl, modelViewProj_param);
+
+ // FIXME: add interaction for object separately from camera?
+ // cgGLSetMatrixParameterfc(model_param, object.get_transform().get_value());
+ pipeline.setMatrixParameterfc(gl, model_param, identityMatrix);
+
+ // calculate eye position in cubemap space
+ Vec3f eyePos_eye = new Vec3f();
+ Vec3f eyePos_model = new Vec3f();
+ view.invertRigid();
+ view.xformPt(eyePos_eye, eyePos_model);
+ pipeline.setVertexProgramParameter3f(gl, eyePos_param, eyePos_model.x(), eyePos_model.y(), eyePos_model.z());
+
+ gl.glActiveTexture(GL2.GL_TEXTURE0);
+ gl.glBindTexture(GL2.GL_TEXTURE_CUBE_MAP, hdr_tex);
+ gl.glEnable(GL2.GL_TEXTURE_CUBE_MAP);
+
+ boolean linear = b['l'];
+ if (linear) {
+ gl.glTexParameteri(GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_LINEAR_MIPMAP_LINEAR);
+ gl.glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_LINEAR);
+ } else {
+ // glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_NEAREST_MIPMAP_NEAREST);
+ gl.glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_NEAREST);
+ gl.glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_NEAREST);
+ }
+
+ if (hilo) {
+ gl.glActiveTexture(GL2.GL_TEXTURE1);
+ gl.glBindTexture(GL2.GL_TEXTURE_CUBE_MAP, hdr_tex2);
+ gl.glEnable(GL2.GL_TEXTURE_CUBE_MAP);
+
+ if (linear) {
+ gl.glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_LINEAR_MIPMAP_LINEAR);
+ gl.glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_LINEAR);
+ } else {
+ // glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_NEAREST_MIPMAP_NEAREST);
+ gl.glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MIN_FILTER, GL2.GL_NEAREST);
+ gl.glTexParameteri( GL2.GL_TEXTURE_CUBE_MAP, GL2.GL_TEXTURE_MAG_FILTER, GL2.GL_NEAREST);
+ }
+ }
+
+ gl.glEnable(GL2.GL_CULL_FACE);
+
+ switch(modelno) {
+ case 0:
+ glut.glutSolidTorus( 0.25, 0.5, 40, 40);
+ break;
+ case 1:
+ glut.glutSolidSphere(0.75f, 40, 40);
+ break;
+ case 2:
+ glut.glutSolidTetrahedron();
+ break;
+ case 3:
+ glut.glutSolidCube(1.0f);
+ break;
+ case 4:
+ // Something about the teapot's geometry causes bad artifacts
+ // glut.glutSolidTeapot(gl, 1.0f);
+ break;
+ case 5:
+ gl.glEnableClientState(GL2.GL_VERTEX_ARRAY);
+ gl.glEnableClientState(GL2.GL_NORMAL_ARRAY);
+ gl.glVertexPointer(3, GL2.GL_FLOAT, 0, model.getVertices());
+ gl.glNormalPointer(GL2.GL_FLOAT, 0, model.getVertexNormals());
+ int[] indices = model.getFaceIndices();
+ gl.glDrawElements(GL2.GL_TRIANGLES, indices.length, GL2.GL_UNSIGNED_INT, IntBuffer.wrap(indices));
+ gl.glDisableClientState(GL2.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL2.GL_NORMAL_ARRAY);
+ break;
+ }
+
+ gl.glDisable(GL2.GL_CULL_FACE);
+ pipeline.disableVertexProgram(gl);
+ pipeline.disableFragmentProgram(gl);
+ gl.glPolygonMode(GL2.GL_FRONT_AND_BACK, GL2.GL_FILL);
+ }
+ }
+
+ class BlurPbufferListener implements GLEventListener {
+ public void init(GLAutoDrawable drawable) {
+ // printThreadName("init for BlurPbufferListener");
+
+ // drawable.setGL(new DebugGL(drawable.getGL()));
+
+ GL2 gl = drawable.getGL().getGL2();
+
+ // FIXME: what about the ExaminerViewer?
+ setOrthoProjection(gl, 0, 0, blur_w, blur_h);
+
+ pipeline.initFloatingPointTexture(gl, blur_pbuffer_tex, blur_w, blur_h);
+ }
+
+ public void display(GLAutoDrawable drawable) {
+ // printThreadName("display for BlurPbufferListener");
+
+ GL2 gl = drawable.getGL().getGL2();
+
+ // horizontal blur
+ gl.glBindProgramARB(GL2.GL_FRAGMENT_PROGRAM_ARB, blurh_fprog);
+ gl.glActiveTexture(GL2.GL_TEXTURE0);
+ pipeline.bindTexture(gl, blur2_pbuffer_tex);
+ glowPass(gl);
+
+ pipeline.copyToTexture(gl, blur_pbuffer_tex, blur_w, blur_h);
+ }
+
+ // Unused routines
+ public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {}
+ public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) {}
+ public void dispose(GLAutoDrawable drawable) {}
+ }
+
+ class Blur2PbufferListener implements GLEventListener {
+ public void init(GLAutoDrawable drawable) {
+ // printThreadName("init for Blur2PbufferListener");
+
+ // drawable.setGL(new DebugGL(drawable.getGL()));
+
+ GL2 gl = drawable.getGL().getGL2();
+ // FIXME: what about the ExaminerViewer?
+ setOrthoProjection(gl, 0, 0, blur_w, blur_h);
+
+ pipeline.initFloatingPointTexture(gl, blur2_pbuffer_tex, blur_w, blur_h);
+ }
+
+ public void display(GLAutoDrawable drawable) {
+ // printThreadName("display for Blur2PbufferListener");
+
+ GL2 gl = drawable.getGL().getGL2();
+
+ if (blur2Pass == BLUR2_SHRINK_PASS) {
+ gl.glClear(GL2.GL_COLOR_BUFFER_BIT);
+
+ pipeline.enableFragmentProgram(gl, shrink_fprog);
+ setOrthoProjection(gl, 0, 0, blur_w, blur_h);
+ gl.glActiveTexture(GL2.GL_TEXTURE0);
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, pbuffer_tex);
+ drawQuadRect2(gl, blur_w, blur_h, pbuffer_w, pbuffer_h);
+ pipeline.disableFragmentProgram(gl);
+
+ } else if (blur2Pass == BLUR2_VERT_BLUR_PASS) {
+
+ // vertical blur
+ gl.glBindProgramARB(GL2.GL_FRAGMENT_PROGRAM_ARB, blurv_fprog);
+ gl.glActiveTexture(GL2.GL_TEXTURE0);
+ pipeline.bindTexture(gl, blur_pbuffer_tex);
+ glowPass(gl);
+
+ } else {
+ throw new RuntimeException("Illegal value of blur2Pass: " + blur2Pass);
+ }
+
+ pipeline.copyToTexture(gl, blur2_pbuffer_tex, blur_w, blur_h);
+ }
+
+ // Unused routines
+ public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {}
+ public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) {}
+ public void dispose(GLAutoDrawable drawable) {}
+ }
+
+ class TonemapPbufferListener implements GLEventListener {
+ public void init(GLAutoDrawable drawable) {
+ GL2 gl = drawable.getGL().getGL2();
+
+ setOrthoProjection(gl, 0, 0, pbuffer_w, pbuffer_h);
+
+ pipeline.initTexture(gl, tonemap_pbuffer_tex, pbuffer_w, pbuffer_h);
+ }
+
+ public void display(GLAutoDrawable drawable) {
+ GL2 gl = drawable.getGL().getGL2();
+
+ toneMappingPass(gl);
+
+ pipeline.copyToTexture(gl, tonemap_pbuffer_tex, pbuffer_w, pbuffer_h);
+ }
+
+ // Unused routines
+ public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {}
+ public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) {}
+ public void dispose(GLAutoDrawable drawable) {}
+ }
+
+ //----------------------------------------------------------------------
+ // Rendering routines
+ //
+
+ private void setOrthoProjection(GL2 gl, int x, int y, int w, int h) {
+ gl.glMatrixMode(GL2.GL_PROJECTION);
+ gl.glLoadIdentity();
+ gl.glOrtho(0, w, 0, h, -1.0, 1.0);
+ gl.glMatrixMode(GL2.GL_TEXTURE);
+ gl.glLoadIdentity();
+ gl.glMatrixMode(GL2.GL_MODELVIEW);
+ gl.glLoadIdentity();
+ gl.glViewport(x, y, w, h);
+ }
+
+ private void setPerspectiveProjection(GL2 gl, int w, int h) {
+ // FIXME: what about ExaminerViewer?
+ gl.glMatrixMode(GL2.GL_PROJECTION);
+ gl.glLoadIdentity();
+ glu.gluPerspective(60.0, (float) w / (float) h, 0.1, 10.0);
+ gl.glMatrixMode(GL2.GL_MODELVIEW);
+ gl.glLoadIdentity();
+ gl.glViewport(0, 0, w, h);
+ }
+
+ // blur floating point image
+ private void glowPass(GL2 gl) {
+ gl.glDisable(GL2.GL_DEPTH_TEST);
+ gl.glEnable(GL2.GL_FRAGMENT_PROGRAM_ARB);
+
+ setOrthoProjection(gl, 0, 0, blur_w, blur_h);
+ drawQuadRect(gl, blur_w, blur_h);
+
+ gl.glDisable(GL2.GL_FRAGMENT_PROGRAM_ARB);
+ }
+
+ private void drawQuadRect(GL2 gl, int w, int h) {
+ gl.glBegin(GL2.GL_QUADS);
+ gl.glTexCoord2f(0, h); gl.glMultiTexCoord2f(GL2.GL_TEXTURE1, 0, h / blur_scale); gl.glVertex3f(0, h, 0);
+ gl.glTexCoord2f(w, h); gl.glMultiTexCoord2f(GL2.GL_TEXTURE1, w / blur_scale, h / blur_scale); gl.glVertex3f(w, h, 0);
+ gl.glTexCoord2f(w, 0); gl.glMultiTexCoord2f(GL2.GL_TEXTURE1, w / blur_scale, 0); gl.glVertex3f(w, 0, 0);
+ gl.glTexCoord2f(0, 0); gl.glMultiTexCoord2f(GL2.GL_TEXTURE1, 0, 0); gl.glVertex3f(0, 0, 0);
+ gl.glEnd();
+ }
+
+ private void drawQuadRect2(GL2 gl, int w, int h, int tw, int th) {
+ gl.glBegin(GL2.GL_QUADS);
+ gl.glTexCoord2f(0, th); gl.glVertex3f(0, h, 0);
+ gl.glTexCoord2f(tw, th); gl.glVertex3f(w, h, 0);
+ gl.glTexCoord2f(tw, 0); gl.glVertex3f(w, 0, 0);
+ gl.glTexCoord2f(0, 0); gl.glVertex3f(0, 0, 0);
+ gl.glEnd();
+ }
+
+ private void drawQuadRect4(GL2 gl, int w, int h, int tw, int th) {
+ float offset = 0.5f;
+ gl.glBegin(GL2.GL_QUADS);
+ gl.glTexCoord2f(offset, th - offset); gl.glVertex3f(0, h, 0);
+ gl.glTexCoord2f(tw - offset, th - offset); gl.glVertex3f(w, h, 0);
+ gl.glTexCoord2f(tw - offset, offset); gl.glVertex3f(w, 0, 0);
+ gl.glTexCoord2f(offset, offset); gl.glVertex3f(0, 0, 0);
+ gl.glEnd();
+ }
+
+ private void disableTexGen(GL gl) {
+ gl.glDisable(GL2.GL_TEXTURE_GEN_S);
+ gl.glDisable(GL2.GL_TEXTURE_GEN_T);
+ gl.glDisable(GL2.GL_TEXTURE_GEN_R);
+ }
+
+ private void enableTexGen(GL gl) {
+ gl.glEnable(GL2.GL_TEXTURE_GEN_S);
+ gl.glEnable(GL2.GL_TEXTURE_GEN_T);
+ gl.glEnable(GL2.GL_TEXTURE_GEN_R);
+ }
+
+ // draw cubemap background
+ private void drawSkyBox(GL2 gl) {
+ gl.glActiveTexture(GL2.GL_TEXTURE0);
+ gl.glBindTexture(GL2.GL_TEXTURE_CUBE_MAP, hdr_tex);
+ gl.glEnable(GL2.GL_TEXTURE_CUBE_MAP);
+
+ if (hilo) {
+ gl.glActiveTexture(GL2.GL_TEXTURE1);
+ gl.glBindTexture(GL2.GL_TEXTURE_CUBE_MAP, hdr_tex2);
+ gl.glEnable(GL2.GL_TEXTURE_CUBE_MAP);
+ }
+
+ // initialize object linear texgen
+ gl.glActiveTexture(GL2.GL_TEXTURE0);
+ gl.glMatrixMode(GL2.GL_MODELVIEW);
+ gl.glPushMatrix();
+ gl.glLoadIdentity();
+ float[] s_plane = { 1.0f, 0.0f, 0.0f, 0.0f };
+ float[] t_plane = { 0.0f, 1.0f, 0.0f, 0.0f };
+ float[] r_plane = { 0.0f, 0.0f, 1.0f, 0.0f };
+ gl.glTexGenfv(GL2.GL_S, GL2.GL_OBJECT_PLANE, s_plane, 0);
+ gl.glTexGenfv(GL2.GL_T, GL2.GL_OBJECT_PLANE, t_plane, 0);
+ gl.glTexGenfv(GL2.GL_R, GL2.GL_OBJECT_PLANE, r_plane, 0);
+ gl.glPopMatrix();
+ gl.glTexGeni(GL2.GL_S, GL2.GL_TEXTURE_GEN_MODE, GL2.GL_OBJECT_LINEAR);
+ gl.glTexGeni(GL2.GL_T, GL2.GL_TEXTURE_GEN_MODE, GL2.GL_OBJECT_LINEAR);
+ gl.glTexGeni(GL2.GL_R, GL2.GL_TEXTURE_GEN_MODE, GL2.GL_OBJECT_LINEAR);
+ enableTexGen(gl);
+
+ gl.glTexEnvi(GL2.GL_TEXTURE_ENV, GL2.GL_TEXTURE_ENV_MODE, GL2.GL_REPLACE);
+
+ gl.glMatrixMode(GL2.GL_TEXTURE);
+ gl.glPushMatrix();
+ gl.glLoadIdentity();
+ viewer.updateInverseRotation(gl);
+
+ gl.glMatrixMode(GL2.GL_MODELVIEW);
+ gl.glPushMatrix();
+ gl.glLoadIdentity();
+ gl.glScalef(10.0f, 10.0f, 10.0f);
+ glut.glutSolidCube(1.0f);
+ gl.glPopMatrix();
+
+ gl.glDisable(GL2.GL_TEXTURE_CUBE_MAP);
+
+ gl.glMatrixMode(GL2.GL_TEXTURE);
+ gl.glPopMatrix();
+ gl.glMatrixMode(GL2.GL_MODELVIEW);
+
+ disableTexGen(gl);
+ }
+
+ // read from float texture, apply tone mapping, render to regular 8/8/8 display
+ private void toneMappingPass(GL2 gl) {
+ gl.glFinish();
+
+ gl.glActiveTexture(GL2.GL_TEXTURE0);
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, pbuffer_tex);
+
+ gl.glActiveTexture(GL2.GL_TEXTURE1);
+ if (blur2_pbuffer != null) {
+ gl.glBindTexture(GL2.GL_TEXTURE_RECTANGLE_ARB, blur2_pbuffer_tex);
+ }
+
+ gl.glActiveTexture(GL2.GL_TEXTURE2);
+ gl.glBindTexture(GL2.GL_TEXTURE_1D, gamma_tex);
+
+ gl.glActiveTexture(GL2.GL_TEXTURE3);
+ pipeline.bindTexture(gl, vignette_tex);
+
+ pipeline.enableFragmentProgram(gl, tonemap_fprog);
+
+ pipeline.setFragmentProgramParameter1f(gl, blurAmount_param, blurAmount);
+ pipeline.setFragmentProgramParameter4f(gl, windowSize_param, 2.0f/win_w, 2.0f/win_h, -1.0f, -1.0f);
+ pipeline.setFragmentProgramParameter1f(gl, exposure_param, exposure);
+
+ drawQuadRect(gl, win_w, win_h);
+
+ pipeline.disableFragmentProgram(gl);
+ }
+
+ //----------------------------------------------------------------------
+ // Cg and blur code initialization
+ //
+
+ private String shaderRoot = "demos/hdr/shaders/";
+ private void initCg(GL2 gl) {
+ // NOTE: need to instantiate CgPipeline reflectively to avoid
+ // compile-time dependence (since Cg support might not be present)
+ try {
+ Class cgPipelineClass = Class.forName("demos.hdr.CgPipeline");
+ pipeline = (Pipeline) cgPipelineClass.newInstance();
+ } catch (Exception e) {
+ throw new GLException(e);
+ }
+ pipeline.init();
+
+ try {
+ tonemap_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "cg/tonemap.cg");
+ blurAmount_param = pipeline.getNamedParameter(tonemap_fprog, "blurAmount");
+ windowSize_param = pipeline.getNamedParameter(tonemap_fprog, "windowSize");
+ exposure_param = pipeline.getNamedParameter(tonemap_fprog, "exposure");
+
+ if (hilo) {
+ skybox_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "cg/skybox_hilo.cg");
+ object_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "cg/object_hilo.cg");
+ } else {
+ skybox_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "cg/skybox.cg");
+ object_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "cg/object.cg");
+ }
+
+ shrink_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "cg/shrink.cg");
+
+ object_vprog = pipeline.loadVertexProgram(gl, shaderRoot + "cg/object_vp.cg");
+ modelViewProj_param = pipeline.getNamedParameter(object_vprog, "modelViewProj");
+ model_param = pipeline.getNamedParameter(object_vprog, "model");
+ eyePos_param = pipeline.getNamedParameter(object_vprog, "eyePos");
+ } catch (IOException e) {
+ throw new RuntimeException("Error loading shaders", e);
+ }
+ }
+
+ private void initARBFP(GL2 gl, int texmode) {
+ pipeline = new ARBFPPipeline(texmode);
+ pipeline.init();
+
+ try {
+ // NOTE that the program parameters are hard-coded; in the
+ // future we can use GLSL but for this demo we desire good
+ // backward compatibility
+ tonemap_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "arbfp1/tonemap.arbfp1");
+ blurAmount_param = 1;
+ windowSize_param = -1; // Not used
+ exposure_param = 2;
+
+ if (hilo) {
+ skybox_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "arbfp1/skybox_hilo.arbfp1");
+ object_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "arbfp1/object_hilo.arbfp1");
+ } else {
+ skybox_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "arbfp1/skybox.arbfp1");
+ object_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "arbfp1/object.arbfp1");
+ }
+
+ shrink_fprog = pipeline.loadFragmentProgram(gl, shaderRoot + "arbfp1/shrink.arbfp1");
+
+ object_vprog = pipeline.loadVertexProgram(gl, shaderRoot + "arbfp1/object_vp.arbvp1");
+ modelViewProj_param = 0;
+ model_param = 4;
+ eyePos_param = 8;
+ } catch (IOException e) {
+ throw new RuntimeException("Error loading shaders", e);
+ }
+ }
+
+ private void initBlurCode(GL2 gl, int blurWidth) {
+ // generate blur code
+ String blurCode = generateBlurCodeFP2(blurWidth, false);
+ blurh_fprog = loadProgram(gl, GL2.GL_FRAGMENT_PROGRAM_ARB, blurCode);
+ // printf("%s\n", blurCode);
+
+ blurCode = generateBlurCodeFP2(blurWidth, true);
+ blurv_fprog = loadProgram(gl, GL2.GL_FRAGMENT_PROGRAM_ARB, blurCode);
+ // printf("%s\n", blurCode);
+ }
+
+ private int loadProgram(GL2 gl, int target, String code) {
+ int prog_id;
+ int[] tmp = new int[1];
+ gl.glGenProgramsARB(1, tmp, 0);
+ prog_id = tmp[0];
+ gl.glBindProgramARB(target, prog_id);
+ int size = code.length();
+ gl.glProgramStringARB(target, GL2.GL_PROGRAM_FORMAT_ASCII_ARB, code.length(), code);
+ int[] errPos = new int[1];
+ gl.glGetIntegerv(GL2.GL_PROGRAM_ERROR_POSITION_ARB, errPos, 0);
+ if (errPos[0] >= 0) {
+ String kind = "Program";
+ if (target == GL2.GL_VERTEX_PROGRAM_ARB) {
+ kind = "Vertex program";
+ } else if (target == GL2.GL_FRAGMENT_PROGRAM_ARB) {
+ kind = "Fragment program";
+ }
+ System.out.println(kind + " failed to load:");
+ String errMsg = gl.glGetString(GL2.GL_PROGRAM_ERROR_STRING_ARB);
+ if (errMsg == null) {
+ System.out.println("[No error message available]");
+ } else {
+ System.out.println("Error message: \"" + errMsg + "\"");
+ }
+ System.out.println("Error occurred at position " + errPos[0] + " in program:");
+ int endPos = errPos[0];
+ while (endPos < code.length() && code.charAt(endPos) != '\n') {
+ ++endPos;
+ }
+ System.out.println(code.substring(errPos[0], endPos));
+ throw new GLException("Error loading " + kind);
+ } else {
+ if (target == GL2.GL_FRAGMENT_PROGRAM_ARB) {
+ int[] isNative = new int[1];
+ gl.glGetProgramiv( GL2.GL_FRAGMENT_PROGRAM_ARB,
+ GL2.GL_PROGRAM_UNDER_NATIVE_LIMITS_ARB,
+ isNative, 0 );
+ if (isNative[0] != 1) {
+ System.out.println("WARNING: fragment program is over native resource limits");
+ Thread.dumpStack();
+ }
+ }
+ }
+ return prog_id;
+ }
+
+ // 1d Gaussian distribution
+ private float gaussian(float x, float s) {
+ return (float) (Math.exp(-x*x/(2*s*s)) / (s*Math.sqrt(2*Math.PI)));
+ }
+
+ private void dumpWeights(int n) {
+ float s = n / 3.0f;
+ float sum = 0.0f;
+ System.err.println("gaussian weights, s = " + s + ", n = " + n);
+ for(int x=-n; x<=n; x++) {
+ float w = gaussian(x, s);
+ sum += w;
+ System.err.println("" + x + ": " + w);
+ }
+ System.err.println("sum = " + sum);
+ }
+
+ // optimized version
+ // pairs texture lookups, uses half precision
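+ // Emits an ARBfp1.0 program that samples 2n+1 texels along one axis
+ // (vertical or horizontal), weights each with a Gaussian evaluated at
+ // 3*i/n and normalized by the precomputed sum, and accumulates the
+ // result in temporary H2.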
+ private String generateBlurCodeFP2(int n, boolean vertical) {
+ StringBuffer buf = new StringBuffer();
+
+ float sum = 0;
+ for(int i=-n; i<=n; i++) {
+ float weight = gaussian(3.0f*i / (float) n, 1.0f);
+ sum += weight;
+ }
+ System.err.println("sum = " + sum);
+
+ buf.append("!!ARBfp1.0\n");
+ buf.append("TEMP H0, H1, H2;\n");
+ for(int i=-n; i<=n; i+=2) {
+ float weight = gaussian(3.0f*i / (float) n, 1.0f) / sum;
+ float weight2 = gaussian(3.0f*(i+1) / (float) n, 1.0f) / sum;
+
+ int x_offset, y_offset, x_offset2, y_offset2;
+ if (vertical) {
+ x_offset = 0; x_offset2 = 0;
+ y_offset = i; y_offset2 = i+1;
+ } else {
+ x_offset = i; x_offset2 = i+1;
+ y_offset = 0; y_offset2 = 0;
+ }
+
+ // calculate texcoords
+ buf.append("ADD H0, fragment.texcoord[0], {" + x_offset + ", " + y_offset + "};\n");
+ if (i+1 <= n) {
+ buf.append("ADD H1, fragment.texcoord[0], {" + x_offset2 + ", " + y_offset2 + "};\n");
+ }
+ // do texture lookups
+ buf.append("TEX H0, H0, texture[0], RECT;\n");
+ if (i+1 <= n) {
+ buf.append("TEX H1, H1, texture[0], RECT;\n");
+ }
+
+ // accumulate results
+ if (i==-n) {
+ // first sample
+ buf.append("MUL H2, H0, {" + weight + "}.x;\n");
+ buf.append("MAD H2, H1, {" + weight2 + "}.x, H2;\n");
+ } else {
+ buf.append("MAD H2, H0, {" + weight + "}.x, H2;\n");
+ if (i+1 <= n) {
+ buf.append("MAD H2, H1, {" + weight2 + "}.x, H2;\n");
+ }
+ }
+ }
+
+ buf.append(
+ "MOV result.color, H2;\n" +
+ "END\n"
+ );
+
+ return buf.toString();
+ }
+
+ private void applyTransform(GL2 gl, Mat4f mat) {
+ float[] data = new float[16];
+ mat.getColumnMajorData(data);
+ gl.glMultMatrixf(data, 0);
+ }
+
+ private void usage() {
+ System.err.println("usage: java demos.hdr.HDR [-cg] image.hdr pbuffer_w pbuffer_h window_scale blur_width blur_decimate [obj file]");
+ shutdownDemo();
+ }
+
+ private void printThreadName(String where) {
+ System.err.println("In " + where + ": current thread = " + Thread.currentThread().getName());
+ }
+
+ private static void runExit(final Animator animator) {
+ // Note: calling System.exit() synchronously inside the draw,
+ // reshape or init callbacks can lead to deadlocks on certain
+ // platforms (in particular, X11) because the JAWT's locking
+ // routines cause a global AWT lock to be grabbed. Run the
+ // exit routine in another thread.
+ new Thread(new Runnable() {
+ public void run() {
+ animator.stop();
+ System.exit(0);
+ }
+ }).start();
+ }
+}
diff --git a/src/demos/hdr/HDRTexture.java b/src/demos/hdr/HDRTexture.java
new file mode 100755
index 0000000..e21c898
--- /dev/null
+++ b/src/demos/hdr/HDRTexture.java
@@ -0,0 +1,495 @@
+package demos.hdr;
+
+import java.io.*;
+import java.nio.*;
+
+import javax.media.opengl.*;
+
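+/**
+ * Loads a Radiance RGBE (.hdr) image via the RGBE helper class and converts
+ * it into OpenGL textures: RGBE-encoded 2D textures, HILO float textures,
+ * or cube maps extracted from a 3x4 vertical-cross image.
+ */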
+public class HDRTexture {
+ private RGBE.Header header;
+ private byte[] m_data;
+ private float[] m_floatdata;
+ private int m_width, m_height;
+ private float m_max_r, m_max_g, m_max_b;
+ private float m_min_r, m_min_g, m_min_b;
+ private float m_max;
+ private int m_target;
+
+ public HDRTexture(String filename) throws IOException {
+ this(new FileInputStream(filename));
+ }
+
+ public HDRTexture(InputStream in) throws IOException {
+ DataInputStream datain = new DataInputStream(new BufferedInputStream(in));
+ header = RGBE.readHeader(datain);
+ m_width = header.getWidth();
+ m_height = header.getHeight();
+ m_data = new byte[m_width * m_height * 4];
+ RGBE.readPixelsRawRLE(datain, m_data, 0, m_width, m_height);
+ System.err.println("Loaded HDR image " + m_width + " x " + m_height);
+ }
+
+ public byte[] getData() { return m_data; }
+ public int getPixelIndex(int x, int y) {
+ return ((m_width * (m_height - 1 - y)) + x) * 4;
+ }
+ public float[] getFloatData() { return m_floatdata; }
+ public int getPixelFloatIndex(int x, int y) {
+ return ((m_width * (m_height - 1 - y)) + x) * 3;
+ }
+
+ public void analyze() {
+ m_max_r = m_max_g = m_max_b = 0.0f;
+ m_min_r = m_min_g = m_min_b = 1e10f;
+ int mine = 255;
+ int maxe = 0;
+
+ int ptr = 0;
+ float[] rgb = new float[3];
+ for(int i=0; i<m_width*m_height; i++) {
+ int e = m_data[ptr+3] & 0xFF;
+ if (e < mine) mine = e;
+ if (e > maxe) maxe = e;
+
+ RGBE.rgbe2float(rgb, m_data, ptr);
+ float r = rgb[0];
+ float g = rgb[1];
+ float b = rgb[2];
+ if (r > m_max_r) m_max_r = r;
+ if (g > m_max_g) m_max_g = g;
+ if (b > m_max_b) m_max_b = b;
+ if (r < m_min_r) m_min_r = r;
+ if (g < m_min_g) m_min_g = g;
+ if (b < m_min_b) m_min_b = b;
+ ptr += 4;
+ }
+ System.err.println("max intensity: " + m_max_r + " " + m_max_g + " " + m_max_b);
+ System.err.println("min intensity: " + m_min_r + " " + m_min_g + " " + m_min_b);
+ System.err.println("max e: " + maxe + " = " + RGBE.ldexp(1.0, maxe-128));
+ System.err.println("min e: " + mine + " = " + RGBE.ldexp(1.0, mine-128));
+
+ m_max = m_max_r;
+ if (m_max_g > m_max) m_max = m_max_g;
+ if (m_max_b > m_max) m_max = m_max_b;
+ System.err.println("max: " + m_max);
+ }
+
+ /** Converts from RGBE to floating-point RGB data. */
+ public void convert() {
+ m_floatdata = new float [m_width*m_height*3];
+
+ int src = 0;
+ int dest = 0;
+ float[] rgb = new float[3];
+ for(int i=0; i<m_width*m_height; i++) {
+ RGBE.rgbe2float(rgb, m_data, src);
+
+ m_floatdata[dest++] = remap(rgb[0], m_max);
+ m_floatdata[dest++] = remap(rgb[1], m_max);
+ m_floatdata[dest++] = remap(rgb[2], m_max);
+
+ src += 4;
+ }
+ }
+
+ public int create2DTextureRGBE(GL gl, int targetTextureType) {
+ m_target = targetTextureType;
+ int[] tmp = new int[1];
+ gl.glGenTextures(1, tmp, 0);
+ int texid = tmp[0];
+
+ gl.glBindTexture(m_target, texid);
+
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE);
+
+ gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);
+ gl.glTexParameteri(m_target, GL2ES1.GL_GENERATE_MIPMAP, GL.GL_TRUE);
+ gl.glTexImage2D(m_target, 0, GL.GL_RGBA, m_width, m_height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, ByteBuffer.wrap(m_data));
+
+ return texid;
+ }
+
+ public int create2DTextureHILO(GL gl, int targetTextureType, boolean rg) {
+ m_target = targetTextureType;
+ int[] tmp = new int[1];
+ gl.glGenTextures(1, tmp, 0);
+ int texid = tmp[0];
+
+ gl.glBindTexture(m_target, texid);
+
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE);
+
+ gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);
+ gl.glTexParameteri(m_target, GL2ES1.GL_GENERATE_MIPMAP, GL.GL_TRUE);
+
+ float[] img = new float [m_width * m_height * 2];
+ int src = 0;
+ int dest = 0;
+ for (int j=0; j<m_height; j++) {
+ for (int i=0; i<m_width; i++) {
+ if (rg) {
+ img[dest++] = m_floatdata[src + 0];
+ img[dest++] = m_floatdata[src + 1];
+ } else {
+ img[dest++] = m_floatdata[src + 2];
+ img[dest++] = 0;
+ }
+ src+=3;
+ }
+ }
+
+ gl.glTexImage2D(m_target, 0, GL2.GL_HILO16_NV, m_width, m_height, 0, GL2.GL_HILO_NV, GL.GL_FLOAT, FloatBuffer.wrap(img));
+
+ return texid;
+ }
+
+ // create a cubemap texture from a 2D image in cross format (thanks to Jonathon McGee)
+ public int createCubemapRGBE(GL gl) {
+ // cross is 3 faces wide, 4 faces high
+ int face_width = m_width / 3;
+ int face_height = m_height / 4;
+ byte[] face = new byte[face_width * face_height * 4];
+
+ m_target = GL.GL_TEXTURE_CUBE_MAP;
+ int[] tmp = new int[1];
+ gl.glGenTextures(1, tmp, 0);
+ int texid = tmp[0];
+ gl.glBindTexture(m_target, texid);
+
+ gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);
+ gl.glTexParameteri(m_target, GL2ES1.GL_GENERATE_MIPMAP, GL.GL_TRUE);
+
+ // gl.glTexParameteri(m_target, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
+ // gl.glTexParameteri(m_target, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR_MIPMAP_LINEAR);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE);
+
+ // extract 6 faces
+
+ // positive Y
+ int ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelIndex(2 * face_width - (i + 1), 3 * face_height + j);
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL.GL_RGBA, face_width, face_height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, ByteBuffer.wrap(face));
+
+ // positive X
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelIndex(i, m_height - (face_height + j + 1));
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL.GL_RGBA, face_width, face_height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, ByteBuffer.wrap(face));
+
+ // negative Z
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelIndex(face_width + i, m_height - (face_height + j + 1));
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL.GL_RGBA, face_width, face_height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, ByteBuffer.wrap(face));
+
+ // negative X
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelIndex(2 * face_width + i, m_height - (face_height + j + 1));
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL.GL_RGBA, face_width, face_height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, ByteBuffer.wrap(face));
+
+ // negative Y
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelIndex(2 * face_width - (i + 1), face_height + j);
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL.GL_RGBA, face_width, face_height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, ByteBuffer.wrap(face));
+
+ // positive Z
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelIndex(2 * face_width - (i + 1), j);
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ face[ptr++] = m_data[src++];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL.GL_RGBA, face_width, face_height, 0, GL.GL_RGBA, GL.GL_UNSIGNED_BYTE, ByteBuffer.wrap(face));
+
+ return texid;
+ }
+
+ public int createCubemapHILO(GL gl, boolean rg) {
+ // cross is 3 faces wide, 4 faces high
+ int face_width = m_width / 3;
+ int face_height = m_height / 4;
+ float[] face = new float [face_width * face_height * 2];
+
+ m_target = GL.GL_TEXTURE_CUBE_MAP;
+ int[] tmp = new int[1];
+ gl.glGenTextures(1, tmp, 0);
+ int texid = tmp[0];
+ gl.glBindTexture(m_target, texid);
+
+ gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);
+ gl.glTexParameteri(m_target, GL2ES1.GL_GENERATE_MIPMAP, GL.GL_TRUE);
+
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR_MIPMAP_LINEAR);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE);
+
+ // extract 6 faces
+
+ // positive Y
+ int ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(2 * face_width - (i + 1), 3 * face_height + j);
+ if (rg) {
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ } else {
+ face[ptr++] = m_floatdata[src + 2];
+ face[ptr++] = 0;
+ }
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL2.GL_HILO16_NV, face_width, face_height, 0, GL2.GL_HILO_NV, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // positive X
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(i, m_height - (face_height + j + 1));
+ if (rg) {
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ } else {
+ face[ptr++] = m_floatdata[src + 2];
+ face[ptr++] = 0;
+ }
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL2.GL_HILO16_NV, face_width, face_height, 0, GL2.GL_HILO_NV, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // negative Z
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(face_width + i, m_height - (face_height + j + 1));
+ if (rg) {
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ } else {
+ face[ptr++] = m_floatdata[src + 2];
+ face[ptr++] = 0;
+ }
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL2.GL_HILO16_NV, face_width, face_height, 0, GL2.GL_HILO_NV, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // negative X
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(2 * face_width + i, m_height - (face_height + j + 1));
+ if (rg) {
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ } else {
+ face[ptr++] = m_floatdata[src + 2];
+ face[ptr++] = 0;
+ }
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL2.GL_HILO16_NV, face_width, face_height, 0, GL2.GL_HILO_NV, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // negative Y
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(2 * face_width - (i + 1), face_height + j);
+ if (rg) {
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ } else {
+ face[ptr++] = m_floatdata[src + 2];
+ face[ptr++] = 0;
+ }
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL2.GL_HILO16_NV, face_width, face_height, 0, GL2.GL_HILO_NV, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // positive Z
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(2 * face_width - (i + 1), j);
+ if (rg) {
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ } else {
+ face[ptr++] = m_floatdata[src + 2];
+ face[ptr++] = 0;
+ }
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL2.GL_HILO16_NV, face_width, face_height, 0, GL2.GL_HILO_NV, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ return texid;
+ }
+
+ public int createCubemap(GL gl, int format) {
+ // cross is 3 faces wide, 4 faces high
+ int face_width = m_width / 3;
+ int face_height = m_height / 4;
+ float[] face = new float [face_width * face_height * 3];
+
+ m_target = GL.GL_TEXTURE_CUBE_MAP;
+ int[] tmp = new int[1];
+ gl.glGenTextures(1, tmp, 0);
+ int texid = tmp[0];
+ gl.glBindTexture(m_target, texid);
+
+ gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);
+ gl.glTexParameteri(m_target, GL2ES1.GL_GENERATE_MIPMAP, GL.GL_TRUE);
+
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR_MIPMAP_LINEAR);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE);
+ gl.glTexParameteri(m_target, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE);
+
+ // extract 6 faces
+
+ // positive Y
+ int ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(2 * face_width - (i + 1), 3 * face_height + j);
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ face[ptr++] = m_floatdata[src + 2];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, format, face_width, face_height, 0, GL.GL_RGB, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // positive X
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(i, m_height - (face_height + j + 1));
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ face[ptr++] = m_floatdata[src + 2];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, format, face_width, face_height, 0, GL.GL_RGB, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // negative Z
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(face_width + i, m_height - (face_height + j + 1));
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ face[ptr++] = m_floatdata[src + 2];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, format, face_width, face_height, 0, GL.GL_RGB, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // negative X
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(2 * face_width + i, m_height - (face_height + j + 1));
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ face[ptr++] = m_floatdata[src + 2];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, format, face_width, face_height, 0, GL.GL_RGB, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // negative Y
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(2 * face_width - (i + 1), face_height + j);
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ face[ptr++] = m_floatdata[src + 2];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, format, face_width, face_height, 0, GL.GL_RGB, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ // positive Z
+ ptr = 0;
+ for (int j=0; j<face_height; j++) {
+ for (int i=0; i<face_width; i++) {
+ int src = getPixelFloatIndex(2 * face_width - (i + 1), j);
+ face[ptr++] = m_floatdata[src + 0];
+ face[ptr++] = m_floatdata[src + 1];
+ face[ptr++] = m_floatdata[src + 2];
+ }
+ }
+ gl.glTexImage2D(GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, format, face_width, face_height, 0, GL.GL_RGB, GL.GL_FLOAT, FloatBuffer.wrap(face));
+
+ return texid;
+ }
+
+ //----------------------------------------------------------------------
+ // Internals only below this point
+ //
+
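+ // Clamps x to max, then compresses with a square root so the remapped value
+ // always lies in [0,1].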
+ private static float remap(float x, float max) {
+ if (x > max) x = max;
+ return (float) Math.sqrt(x / max);
+ }
+
+ public static void main(String[] args) {
+ for (int i = 0; i < args.length; i++) {
+ try {
+ HDRTexture tex = new HDRTexture(args[i]);
+ tex.analyze();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+}
diff --git a/src/demos/hdr/Pipeline.java b/src/demos/hdr/Pipeline.java
new file mode 100755
index 0000000..5307f3c
--- /dev/null
+++ b/src/demos/hdr/Pipeline.java
@@ -0,0 +1,29 @@
+package demos.hdr;
+
+import java.io.IOException;
+import javax.media.opengl.GL2ES1;
+import javax.media.opengl.GL2;
+
+
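+// Abstraction over the demo's shader back ends: loading and enabling vertex and
+// fragment programs, setting their parameters, and creating/binding the
+// associated textures.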
+public interface Pipeline {
+ public void init();
+ public void initFloatingPointTexture (GL2 gl, int textureObject, int w, int h);
+ public void initTexture (GL2 gl, int textureObject, int w, int h);
+ public void copyToTexture (GL2 gl, int textureObject, int w, int h);
+ public void bindTexture (GL2 gl, int textureObject);
+ public int loadVertexProgram (GL2 gl, String filename) throws IOException;
+ public int loadFragmentProgram (GL2 gl, String filename) throws IOException;
+ public void enableVertexProgram (GL2 gl, int program);
+ public void enableFragmentProgram (GL2 gl, int program);
+ public void disableVertexProgram (GL2 gl);
+ public void disableFragmentProgram (GL2 gl);
+ public int getNamedParameter (int program, String name);
+ public void setVertexProgramParameter1f (GL2 gl, int param, float val);
+ public void setVertexProgramParameter3f (GL2 gl, int param, float x, float y, float z);
+ public void setVertexProgramParameter4f (GL2 gl, int param, float x, float y, float z, float w);
+ public void setFragmentProgramParameter1f (GL2 gl, int param, float val);
+ public void setFragmentProgramParameter3f (GL2 gl, int param, float x, float y, float z);
+ public void setFragmentProgramParameter4f (GL2 gl, int param, float x, float y, float z, float w);
+ public void trackModelViewProjectionMatrix(GL2 gl, int param);
+ public void setMatrixParameterfc (GL2 gl, int param, float[] matrix);
+}
diff --git a/src/demos/hdr/RGBE.java b/src/demos/hdr/RGBE.java
new file mode 100755
index 0000000..9c83ae6
--- /dev/null
+++ b/src/demos/hdr/RGBE.java
@@ -0,0 +1,452 @@
+package demos.hdr;
+
+import java.io.*;
+import java.util.regex.*;
+
+/** This file contains code to read and write the four-byte RGBE file format
+ developed by Greg Ward. It handles the conversions between RGBE data and
+ pixels consisting of floats. The data is assumed to be an array of floats.
+ By default there are three floats per pixel in the order red, green, blue
+ (in the original C code the RGBE_DATA_??? values control this). Only minimal
+ header reading and writing is implemented. Each routine does error checking
+ and throws an IOException on failure. This code is intended as a skeleton,
+ so feel free to modify it to suit your needs. <P>
+
+ Ported to Java and restructured by Kenneth Russell. <BR>
+ posted to http://www.graphics.cornell.edu/~bjw/ <BR>
+ written by Bruce Walter ([email protected]) 5/26/95 <BR>
+ based on code written by Greg Ward <BR>
+*/
+
+public class RGBE {
+ // Flags indicating which fields in a Header are valid
+ private static final int VALID_PROGRAMTYPE = 0x01;
+ private static final int VALID_GAMMA = 0x02;
+ private static final int VALID_EXPOSURE = 0x04;
+
+ private static final String gammaString = "GAMMA=";
+ private static final String exposureString = "EXPOSURE=";
+
+ private static final Pattern widthHeightPattern = Pattern.compile("-Y (\\d+) \\+X (\\d+)");
+
+ public static class Header {
+ // Indicates which fields are valid
+ private int valid;
+
+ // Listed at beginning of file to identify it after "#?".
+ // Defaults to "RGBE"
+ private String programType;
+
+ // Image has already been gamma corrected with given gamma.
+ // Defaults to 1.0 (no correction)
+ private float gamma;
+
+ // A value of 1.0 in an image corresponds to <exposure>
+ // watts/steradian/m^2. Defaults to 1.0.
+ private float exposure;
+
+ // Width and height of image
+ private int width;
+ private int height;
+
+ private Header(int valid,
+ String programType,
+ float gamma,
+ float exposure,
+ int width,
+ int height) {
+ this.valid = valid;
+ this.programType = programType;
+ this.gamma = gamma;
+ this.exposure = exposure;
+ this.width = width;
+ this.height = height;
+ }
+
+ public boolean isProgramTypeValid() { return ((valid & VALID_PROGRAMTYPE) != 0); }
+ public boolean isGammaValid() { return ((valid & VALID_GAMMA) != 0); }
+ public boolean isExposureValid() { return ((valid & VALID_EXPOSURE) != 0); }
+
+ public String getProgramType() { return programType; }
+ public float getGamma() { return gamma; }
+ public float getExposure() { return exposure; }
+ public int getWidth() { return width; }
+ public int getHeight() { return height; }
+
+ public String toString() {
+ StringBuffer buf = new StringBuffer();
+ if (isProgramTypeValid()) {
+ buf.append(" Program type: ");
+ buf.append(getProgramType());
+ }
+ buf.append(" Gamma");
+ if (isGammaValid()) {
+ buf.append(" [valid]");
+ }
+ buf.append(": ");
+ buf.append(getGamma());
+ buf.append(" Exposure");
+ if (isExposureValid()) {
+ buf.append(" [valid]");
+ }
+ buf.append(": ");
+ buf.append(getExposure());
+ buf.append(" Width: ");
+ buf.append(getWidth());
+ buf.append(" Height: ");
+ buf.append(getHeight());
+ return buf.toString();
+ }
+ }
+
+ public static Header readHeader(DataInputStream in) throws IOException {
+ int valid = 0;
+ String programType = null;
+ float gamma = 1.0f;
+ float exposure = 1.0f;
+ int width = 0;
+ int height = 0;
+
+ String buf = in.readLine();
+ if (buf == null) {
+ throw new IOException("Unexpected EOF reading magic token");
+ }
+ if (buf.charAt(0) == '#' && buf.charAt(1) == '?') {
+ valid |= VALID_PROGRAMTYPE;
+ programType = buf.substring(2);
+ buf = in.readLine();
+ if (buf == null) {
+ throw new IOException("Unexpected EOF reading line after magic token");
+ }
+ }
+
+ boolean foundFormat = false;
+ boolean done = false;
+ while (!done) {
+ if (buf.equals("FORMAT=32-bit_rle_rgbe")) {
+ foundFormat = true;
+ } else if (buf.startsWith(gammaString)) {
+ valid |= VALID_GAMMA;
+ gamma = Float.parseFloat(buf.substring(gammaString.length()));
+ } else if (buf.startsWith(exposureString)) {
+ valid |= VALID_EXPOSURE;
+ exposure = Float.parseFloat(buf.substring(exposureString.length()));
+ } else {
+ Matcher m = widthHeightPattern.matcher(buf);
+ if (m.matches()) {
+ width = Integer.parseInt(m.group(2));
+ height = Integer.parseInt(m.group(1));
+ done = true;
+ }
+ }
+
+ if (!done) {
+ buf = in.readLine();
+ if (buf == null) {
+ throw new IOException("Unexpected EOF reading header");
+ }
+ }
+ }
+
+ if (!foundFormat) {
+ throw new IOException("No FORMAT specifier found");
+ }
+
+ return new Header(valid, programType, gamma, exposure, width, height);
+ }
+
+ /** Simple read routine. Will not correctly handle run length encoding. */
+ public static void readPixels(DataInputStream in, float[] data, int numpixels) throws IOException {
+ byte[] rgbe = new byte[4];
+ float[] rgb = new float[3];
+ int offset = 0;
+
+ while(numpixels-- > 0) {
+ int numRead = in.read(rgbe);
+ if (numRead < 4) {
+ throw new IOException("Only able to read " + numRead + " out of expected " + rgbe.length + " bytes");
+ }
+ rgbe2float(rgb, rgbe, 0);
+ data[offset++] = rgb[0];
+ data[offset++] = rgb[1];
+ data[offset++] = rgb[2];
+ }
+ }
+
+ public static void readPixelsRaw(DataInputStream in, byte[] data, int offset, int numpixels) throws IOException {
+ int numExpected = 4 * numpixels;
+ int numRead = in.read(data, offset, numExpected);
+ if (numRead < numExpected)
+ throw new IOException("Error reading raw pixels: got " + numRead + " bytes, expected " + numExpected);
+ }
+
+ public static void readPixelsRawRLE(DataInputStream in, byte[] data, int offset,
+ int scanline_width, int num_scanlines) throws IOException {
+ byte[] rgbe = new byte[4];
+ byte[] scanline_buffer = null;
+ int ptr, ptr_end;
+ int count;
+ byte[] buf = new byte[2];
+
+ if ((scanline_width < 8)||(scanline_width > 0x7fff)) {
+ /* run length encoding is not allowed so read flat */
+ readPixelsRaw(in, data, offset, scanline_width*num_scanlines);
+ return;
+ }
+
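+ // New-style RLE scanlines begin with the bytes 2, 2 followed by the scanline
+ // width as a 16-bit big-endian value; each of the four channels (R, G, B, E)
+ // is then stored separately as a mix of runs (count byte > 128: repeat the
+ // next byte count-128 times) and literal dumps (count byte <= 128: copy the
+ // next count bytes).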
+ /* read in each successive scanline */
+ while (num_scanlines > 0) {
+ if (in.read(rgbe) < rgbe.length) {
+ throw new IOException("Error reading bytes: expected " + rgbe.length);
+ }
+
+ if ((rgbe[0] != 2)||(rgbe[1] != 2)||((rgbe[2] & 0x80) != 0)) {
+ /* this file is not run length encoded */
+ data[offset++] = rgbe[0];
+ data[offset++] = rgbe[1];
+ data[offset++] = rgbe[2];
+ data[offset++] = rgbe[3];
+ readPixelsRaw(in, data, offset, scanline_width*num_scanlines-1);
+ return;
+ }
+
+ if ((((rgbe[2] & 0xFF)<<8) | (rgbe[3] & 0xFF)) != scanline_width) {
+ throw new IOException("Wrong scanline width " +
+ (((rgbe[2] & 0xFF)<<8) | (rgbe[3] & 0xFF)) +
+ ", expected " + scanline_width);
+ }
+
+ if (scanline_buffer == null) {
+ scanline_buffer = new byte[4*scanline_width];
+ }
+
+ ptr = 0;
+ /* read each of the four channels for the scanline into the buffer */
+ for (int i=0; i<4; i++) {
+ ptr_end = (i+1)*scanline_width;
+ while(ptr < ptr_end) {
+ if (in.read(buf) < buf.length) {
+ throw new IOException("Error reading 2-byte buffer");
+ }
+ if ((buf[0] & 0xFF) > 128) {
+ /* a run of the same value */
+ count = (buf[0] & 0xFF)-128;
+ if ((count == 0)||(count > ptr_end - ptr)) {
+ throw new IOException("Bad scanline data");
+ }
+ while(count-- > 0)
+ scanline_buffer[ptr++] = buf[1];
+ }
+ else {
+ /* a non-run */
+ count = buf[0] & 0xFF;
+ if ((count == 0)||(count > ptr_end - ptr)) {
+ throw new IOException("Bad scanline data");
+ }
+ scanline_buffer[ptr++] = buf[1];
+ if (--count > 0) {
+ if (in.read(scanline_buffer, ptr, count) < count) {
+ throw new IOException("Error reading non-run data");
+ }
+ ptr += count;
+ }
+ }
+ }
+ }
+ /* copy byte data to output */
+ for(int i = 0; i < scanline_width; i++) {
+ data[offset++] = scanline_buffer[i];
+ data[offset++] = scanline_buffer[i+scanline_width];
+ data[offset++] = scanline_buffer[i+2*scanline_width];
+ data[offset++] = scanline_buffer[i+3*scanline_width];
+ }
+ num_scanlines--;
+ }
+ }
+
+ /** Standard conversion from float pixels to rgbe pixels. */
+ public static void float2rgbe(byte[] rgbe, float red, float green, float blue) {
+ float v;
+ int e;
+
+ v = red;
+ if (green > v) v = green;
+ if (blue > v) v = blue;
+ if (v < 1e-32f) {
+ rgbe[0] = rgbe[1] = rgbe[2] = rgbe[3] = 0;
+ }
+ else {
+ FracExp fe = frexp(v);
+ v = (float) (fe.getFraction() * 256.0 / v);
+ rgbe[0] = (byte) (red * v);
+ rgbe[1] = (byte) (green * v);
+ rgbe[2] = (byte) (blue * v);
+ rgbe[3] = (byte) (fe.getExponent() + 128);
+ }
+ }
+
+ /** Standard conversion from rgbe to float pixels. Note: Ward uses
+ ldexp(col+0.5,exp-(128+8)). However we wanted pixels in the
+ range [0,1] to map back into the range [0,1]. */
+ public static void rgbe2float(float[] rgb, byte[] rgbe, int startRGBEOffset) {
+ float f;
+
+ if (rgbe[startRGBEOffset + 3] != 0) { /*nonzero pixel*/
+ f = (float) ldexp(1.0,(rgbe[startRGBEOffset + 3] & 0xFF)-(128+8));
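+ // e.g. an exponent byte of 0x86 = 134 gives f = 2^(134-136) = 0.25, so the
+ // RGBE pixel { 0x80, 0x40, 0x20, 0x86 } decodes to (32.0, 16.0, 8.0)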
+ rgb[0] = (rgbe[startRGBEOffset + 0] & 0xFF) * f;
+ rgb[1] = (rgbe[startRGBEOffset + 1] & 0xFF) * f;
+ rgb[2] = (rgbe[startRGBEOffset + 2] & 0xFF) * f;
+ } else {
+ rgb[0] = 0;
+ rgb[1] = 0;
+ rgb[2] = 0;
+ }
+ }
+
+ public static double ldexp(double value, int exp) {
+ if(!finite(value)||value==0.0) return value;
+ value = scalbn(value,exp);
+ // No good way to indicate errno (want to avoid throwing
+ // exceptions because don't know about stability of calculations)
+ // if(!finite(value)||value==0.0) errno = ERANGE;
+ return value;
+ }
+
+ //----------------------------------------------------------------------
+ // Internals only below this point
+ //
+
+ //----------------------------------------------------------------------
+ // Math routines, some fdlibm-derived
+ //
+
+ static class FracExp {
+ private double fraction;
+ private int exponent;
+
+ public FracExp(double fraction, int exponent) {
+ this.fraction = fraction;
+ this.exponent = exponent;
+ }
+
+ public double getFraction() { return fraction; }
+ public int getExponent() { return exponent; }
+ }
+
+ private static final double two54 = 1.80143985094819840000e+16; /* 0x43500000 0x00000000 */
+ private static final double twom54 = 5.55111512312578270212e-17; /* 0x3C900000 0x00000000 */
+ private static final double huge = 1.0e+300;
+ private static final double tiny = 1.0e-300;
+
+ private static int hi(double x) {
+ long bits = Double.doubleToRawLongBits(x);
+ return (int) (bits >>> 32);
+ }
+
+ private static int lo(double x) {
+ long bits = Double.doubleToRawLongBits(x);
+ return (int) bits;
+ }
+
+ private static double fromhilo(int hi, int lo) {
+ return Double.longBitsToDouble((((long) hi) << 32) |
+ (((long) lo) & 0xFFFFFFFFL));
+ }
+
+ private static FracExp frexp(double x) {
+ int hx = hi(x);
+ int ix = 0x7fffffff&hx;
+ int lx = lo(x);
+ int e = 0;
+ if(ix>=0x7ff00000||((ix|lx)==0))
+ return new FracExp(x, e); /* 0,inf,nan */
+ if (ix<0x00100000) { /* subnormal */
+ x *= two54;
+ hx = hi(x);
+ ix = hx&0x7fffffff;
+ e = -54;
+ }
+ e += (ix>>20)-1022;
+ hx = (hx&0x800fffff)|0x3fe00000;
+ lx = lo(x);
+ return new FracExp(fromhilo(hx, lx), e);
+ }
+
+ private static boolean finite(double x) {
+ int hx;
+ hx = hi(x);
+ return (((hx&0x7fffffff)-0x7ff00000)>>31) != 0;
+ }
+
+ /**
+ * copysign(double x, double y) <BR>
+ * copysign(x,y) returns a value with the magnitude of x and
+ * with the sign bit of y.
+ */
+ private static double copysign(double x, double y) {
+ return fromhilo((hi(x)&0x7fffffff)|(hi(y)&0x80000000), lo(x));
+ }
+
+ /**
+ * scalbn (double x, int n) <BR>
+ * scalbn(x,n) returns x* 2**n computed by exponent
+ * manipulation rather than by actually performing an
+ * exponentiation or a multiplication.
+ */
+ private static double scalbn(double x, int n) {
+ int hx = hi(x);
+ int lx = lo(x);
+ int k = (hx&0x7ff00000)>>20; /* extract exponent */
+ if (k==0) { /* 0 or subnormal x */
+ if ((lx|(hx&0x7fffffff))==0) {
+ return x; /* +-0 */
+ }
+ x *= two54;
+ hx = hi(x);
+ k = ((hx&0x7ff00000)>>20) - 54;
+ if (n< -50000) {
+ return tiny*x; /*underflow*/
+ }
+ }
+ if (k==0x7ff) {
+ return x+x; /* NaN or Inf */
+ }
+ k = k+n;
+ if (k > 0x7fe) {
+ return huge*copysign(huge,x); /* overflow */
+ }
+ if (k > 0) {
+ /* normal result */
+ return fromhilo((hx&0x800fffff)|(k<<20), lo(x));
+ }
+ if (k <= -54) {
+ if (n > 50000) {
+ /* in case integer overflow in n+k */
+ return huge*copysign(huge,x); /* overflow */
+ } else {
+ return tiny*copysign(tiny,x); /* underflow */
+ }
+ }
+ k += 54; /* subnormal result */
+ x = fromhilo((hx&0x800fffff)|(k<<20), lo(x));
+ return x*twom54;
+ }
+
+ //----------------------------------------------------------------------
+ // Test harness
+ //
+
+ public static void main(String[] args) {
+ for (int i = 0; i < args.length; i++) {
+ try {
+ DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(args[i])));
+ Header header = RGBE.readHeader(in);
+ System.err.println("Header for file \"" + args[i] + "\":");
+ System.err.println(" " + header);
+ byte[] data = new byte[header.getWidth() * header.getHeight() * 4];
+ readPixelsRawRLE(in, data, 0, header.getWidth(), header.getHeight());
+ in.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+}
diff --git a/src/demos/hdr/readme.txt b/src/demos/hdr/readme.txt
new file mode 100755
index 0000000..ebc5a32
--- /dev/null
+++ b/src/demos/hdr/readme.txt
@@ -0,0 +1,3 @@
+Run with e.g.
+
+java -Dsun.java2d.noddraw=true demos.hdr.HDR demos/data/images/stpeters_cross.hdr 512 384 2 7 3 demos/data/models/teapot.obj
diff --git a/src/demos/hdr/shaders/arbfp1/object.arbfp1 b/src/demos/hdr/shaders/arbfp1/object.arbfp1
new file mode 100755
index 0000000..30ee986
--- /dev/null
+++ b/src/demos/hdr/shaders/arbfp1/object.arbfp1
@@ -0,0 +1,41 @@
+!!ARBfp1.0
+# cgc version 1.3.0001, build date Aug 4 2004 10:01:10
+# command line args: -profile arbfp1
+# source file: ../cg/object.cg
+# source file: ../cg/hdr.cg
+#vendor NVIDIA Corporation
+#version 1.0.02
+#profile arbfp1
+#program main
+#semantic main.envMap : TEXUNIT0
+#var float4 In.HPosition : : : 0 : 0
+#var float4 In.P : : : 0 : 0
+#var float3 In.N : $vin.TEX1 : TEX1 : 0 : 1
+#var float3 In.I : $vin.TEX2 : TEX2 : 0 : 1
+#var samplerCUBE envMap : TEXUNIT0 : texunit 0 : 1 : 1
+#var half4 main : $vout.COL : COL : -1 : 1
+#const c[0] = 2 1 0 5
+#const c[1] = 0.9799805 0.02000427
+PARAM c[2] = { { 2, 1, 0, 5 },
+ { 0.97998047, 0.020004272 } };
+TEMP R0;
+TEMP R1;
+DP3 R0.y, fragment.texcoord[2], fragment.texcoord[2];
+RSQ R0.w, R0.y;
+DP3 R0.x, fragment.texcoord[1], fragment.texcoord[1];
+RSQ R0.x, R0.x;
+MUL R1.xyz, R0.w, fragment.texcoord[2];
+MUL R0.xyz, R0.x, fragment.texcoord[1];
+DP3 R0.w, -R1, R0;
+ADD R0.w, -R0, c[0].y;
+DP3 R1.w, R0, R1;
+MAX R0.w, R0, c[0].z;
+MUL R0.xyz, R0, R1.w;
+POW R0.w, R0.w, c[0].w;
+MAD R0.xyz, -R0, c[0].x, R1;
+MAD R0.w, R0, c[1].x, c[1].y;
+TEX R0.xyz, R0, texture[0], CUBE;
+MUL result.color.xyz, R0, R0.w;
+MOV result.color.w, c[0].y;
+END
+# 17 instructions, 2 R-regs
diff --git a/src/demos/hdr/shaders/arbfp1/object_hilo.arbfp1 b/src/demos/hdr/shaders/arbfp1/object_hilo.arbfp1
new file mode 100755
index 0000000..c6d5b55
--- /dev/null
+++ b/src/demos/hdr/shaders/arbfp1/object_hilo.arbfp1
@@ -0,0 +1,45 @@
+!!ARBfp1.0
+# cgc version 1.3.0001, build date Aug 4 2004 10:01:10
+# command line args: -profile arbfp1
+# source file: ../cg/object_hilo.cg
+# source file: ../cg/hdr.cg
+#vendor NVIDIA Corporation
+#version 1.0.02
+#profile arbfp1
+#program main
+#semantic main.envMap_rg : TEXUNIT0
+#semantic main.envMap_b : TEXUNIT1
+#var float4 In.HPosition : : : 0 : 0
+#var float4 In.P : : : 0 : 0
+#var float3 In.N : $vin.TEX1 : TEX1 : 0 : 1
+#var float3 In.I : $vin.TEX2 : TEX2 : 0 : 1
+#var samplerCUBE envMap_rg : TEXUNIT0 : texunit 0 : 1 : 1
+#var samplerCUBE envMap_b : TEXUNIT1 : texunit 1 : 2 : 1
+#var half4 main : $vout.COL : COL : -1 : 1
+#const c[0] = 2 1 0 5
+#const c[1] = 0.9799805 0.02000427
+PARAM c[2] = { { 2, 1, 0, 5 },
+ { 0.97998047, 0.020004272 } };
+TEMP R0;
+TEMP R1;
+DP3 R0.y, fragment.texcoord[2], fragment.texcoord[2];
+RSQ R0.y, R0.y;
+DP3 R0.x, fragment.texcoord[1], fragment.texcoord[1];
+MUL R1.xyz, R0.y, fragment.texcoord[2];
+RSQ R0.x, R0.x;
+MUL R0.xyz, R0.x, fragment.texcoord[1];
+DP3 R0.w, -R1, R0;
+DP3 R1.w, R0, R1;
+MUL R0.xyz, R0, R1.w;
+MAD R1.xyz, -R0, c[0].x, R1;
+ADD R0.w, -R0, c[0].y;
+MAX R0.y, R0.w, c[0].z;
+TEX R0.x, R1, texture[1], CUBE;
+POW R0.y, R0.y, c[0].w;
+MOV R0.z, R0.x;
+MAD R0.w, R0.y, c[1].x, c[1].y;
+TEX R0.xy, R1, texture[0], CUBE;
+MUL result.color.xyz, R0, R0.w;
+MOV result.color.w, c[0].y;
+END
+# 19 instructions, 2 R-regs
diff --git a/src/demos/hdr/shaders/arbfp1/object_vp.arbvp1 b/src/demos/hdr/shaders/arbfp1/object_vp.arbvp1
new file mode 100755
index 0000000..1f2a6fa
--- /dev/null
+++ b/src/demos/hdr/shaders/arbfp1/object_vp.arbvp1
@@ -0,0 +1,42 @@
+!!ARBvp1.0
+# cgc version 1.3.0001, build date Aug 4 2004 10:01:10
+# command line args: -profile arbvp1
+# source file: ../cg/object_vp.cg
+# source file: ../cg/hdr.cg
+#vendor NVIDIA Corporation
+#version 1.0.02
+#profile arbvp1
+#program main
+#semantic main.modelViewProj
+#semantic main.model
+#semantic main.eyePos
+#var float4 In.Position : $vin.POSITION : POSITION : 0 : 1
+#var float4 In.Normal : $vin.NORMAL : NORMAL : 0 : 1
+#var float4x4 modelViewProj : : c[0], 4 : 1 : 1
+#var float4x4 model : : c[4], 4 : 2 : 1
+#var float3 eyePos : : c[8] : 3 : 1
+#var float4 main.HPosition : $vout.HPOS : HPOS : -1 : 1
+#var float4 main.P : $vout.TEX0 : TEX0 : -1 : 1
+#var float3 main.N : $vout.TEX1 : TEX1 : -1 : 1
+#var float3 main.I : $vout.TEX2 : TEX2 : -1 : 1
+PARAM c[9] = { program.local[0..8] };
+TEMP R0;
+TEMP R1;
+DP4 result.position.w, vertex.position, c[3];
+DP4 result.position.z, vertex.position, c[2];
+DP4 result.position.y, vertex.position, c[1];
+DP4 result.position.x, vertex.position, c[0];
+DP4 R0.w, vertex.position, c[7];
+DP4 R0.z, vertex.position, c[6];
+DP4 R0.y, vertex.position, c[5];
+DP3 R1.z, vertex.normal, c[6];
+DP3 R1.y, vertex.normal, c[5];
+DP3 R1.x, vertex.normal, c[4];
+DP4 R0.x, vertex.position, c[4];
+DP3 R1.w, R1, R1;
+ADD result.texcoord[2].xyz, R0, -c[8];
+RSQ R1.w, R1.w;
+MOV result.texcoord[0], R0;
+MUL result.texcoord[1].xyz, R1.w, R1;
+END
+# 16 instructions, 2 R-regs
diff --git a/src/demos/hdr/shaders/arbfp1/recompile.txt b/src/demos/hdr/shaders/arbfp1/recompile.txt
new file mode 100755
index 0000000..48469f3
--- /dev/null
+++ b/src/demos/hdr/shaders/arbfp1/recompile.txt
@@ -0,0 +1,7 @@
+cgc -profile arbfp1 -o object.arbfp1 ../cg/object.cg
+cgc -profile arbfp1 -o object_hilo.arbfp1 ../cg/object_hilo.cg
+cgc -profile arbvp1 -o object_vp.arbvp1 ../cg/object_vp.cg
+cgc -profile arbfp1 -o shrink.arbfp1 ../cg/shrink.cg
+cgc -profile arbfp1 -o skybox.arbfp1 ../cg/skybox.cg
+cgc -profile arbfp1 -o skybox_hilo.arbfp1 ../cg/skybox_hilo.cg
+cgc -profile arbfp1 -o tonemap.arbfp1 ../cg/tonemap.cg
diff --git a/src/demos/hdr/shaders/arbfp1/shrink.arbfp1 b/src/demos/hdr/shaders/arbfp1/shrink.arbfp1
new file mode 100755
index 0000000..ddf7a4b
--- /dev/null
+++ b/src/demos/hdr/shaders/arbfp1/shrink.arbfp1
@@ -0,0 +1,34 @@
+!!ARBfp1.0
+# cgc version 1.3.0001, build date Aug 4 2004 10:01:10
+# command line args: -profile arbfp1
+# source file: ../cg/shrink.cg
+# source file: ../cg/hdr.cg
+#vendor NVIDIA Corporation
+#version 1.0.02
+#profile arbfp1
+#program main
+#semantic main.sceneTex : TEXUNIT0
+#var float4 In.wpos : : : 0 : 0
+#var float3 In.tex0 : $vin.TEX0 : TEX0 : 0 : 1
+#var float3 In.tex1 : : : 0 : 0
+#var float4 In.col0 : : : 0 : 0
+#var samplerRECT sceneTex : TEXUNIT0 : texunit 0 : 1 : 1
+#var half4 main : $vout.COL : COL : -1 : 1
+#const c[0] = 1 0 0.25
+PARAM c[1] = { { 1, 0, 0.25 } };
+TEMP R0;
+TEMP R1;
+TEMP R2;
+ADD R0.xy, fragment.texcoord[0], c[0];
+TEX R1, R0, texture[0], RECT;
+TEX R0, fragment.texcoord[0], texture[0], RECT;
+ADD R2, R0, R1;
+ADD R0.xy, fragment.texcoord[0], c[0].yxzw;
+ADD R1.xy, fragment.texcoord[0], c[0].x;
+TEX R0, R0, texture[0], RECT;
+TEX R1, R1, texture[0], RECT;
+ADD R0, R2, R0;
+ADD R0, R0, R1;
+MUL result.color, R0, c[0].z;
+END
+# 11 instructions, 3 R-regs
diff --git a/src/demos/hdr/shaders/arbfp1/skybox.arbfp1 b/src/demos/hdr/shaders/arbfp1/skybox.arbfp1
new file mode 100755
index 0000000..8d3d9db
--- /dev/null
+++ b/src/demos/hdr/shaders/arbfp1/skybox.arbfp1
@@ -0,0 +1,22 @@
+!!ARBfp1.0
+# cgc version 1.3.0001, build date Aug 4 2004 10:01:10
+# command line args: -profile arbfp1
+# source file: ../cg/skybox.cg
+# source file: ../cg/hdr.cg
+#vendor NVIDIA Corporation
+#version 1.0.02
+#profile arbfp1
+#program main
+#semantic main.envMap : TEXUNIT0
+#var float4 In.wpos : : : 0 : 0
+#var float3 In.tex0 : $vin.TEX0 : TEX0 : 0 : 1
+#var float3 In.tex1 : : : 0 : 0
+#var float4 In.col0 : : : 0 : 0
+#var samplerCUBE envMap : TEXUNIT0 : texunit 0 : 1 : 1
+#var half4 main : $vout.COL : COL : -1 : 1
+#const c[0] = 1
+PARAM c[1] = { { 1 } };
+TEX result.color.xyz, fragment.texcoord[0], texture[0], CUBE;
+MOV result.color.w, c[0].x;
+END
+# 2 instructions, 0 R-regs
diff --git a/src/demos/hdr/shaders/arbfp1/skybox_hilo.arbfp1 b/src/demos/hdr/shaders/arbfp1/skybox_hilo.arbfp1
new file mode 100755
index 0000000..fe0d910
--- /dev/null
+++ b/src/demos/hdr/shaders/arbfp1/skybox_hilo.arbfp1
@@ -0,0 +1,27 @@
+!!ARBfp1.0
+# cgc version 1.3.0001, build date Aug 4 2004 10:01:10
+# command line args: -profile arbfp1
+# source file: ../cg/skybox_hilo.cg
+# source file: ../cg/hdr.cg
+#vendor NVIDIA Corporation
+#version 1.0.02
+#profile arbfp1
+#program main
+#semantic main.envMap_rg : TEXUNIT0
+#semantic main.envMap_b : TEXUNIT1
+#var float4 In.wpos : : : 0 : 0
+#var float3 In.tex0 : $vin.TEX0 : TEX0 : 0 : 1
+#var float3 In.tex1 : : : 0 : 0
+#var float4 In.col0 : : : 0 : 0
+#var samplerCUBE envMap_rg : TEXUNIT0 : texunit 0 : 1 : 1
+#var samplerCUBE envMap_b : TEXUNIT1 : texunit 1 : 2 : 1
+#var half4 main : $vout.COL : COL : -1 : 1
+#const c[0] = 1
+PARAM c[1] = { { 1 } };
+TEMP R0;
+TEX R0.x, fragment.texcoord[0], texture[1], CUBE;
+TEX result.color.xy, fragment.texcoord[0], texture[0], CUBE;
+MOV result.color.z, R0.x;
+MOV result.color.w, c[0].x;
+END
+# 4 instructions, 1 R-regs
diff --git a/src/demos/hdr/shaders/arbfp1/tonemap.arbfp1 b/src/demos/hdr/shaders/arbfp1/tonemap.arbfp1
new file mode 100755
index 0000000..0dd5a5a
--- /dev/null
+++ b/src/demos/hdr/shaders/arbfp1/tonemap.arbfp1
@@ -0,0 +1,64 @@
+!!ARBfp1.0
+# cgc version 1.3.0001, build date Aug 4 2004 10:01:10
+# command line args: -profile arbfp1
+# source file: ../cg/tonemap.cg
+# source file: ../cg/hdr.cg
+#vendor NVIDIA Corporation
+#version 1.0.02
+#profile arbfp1
+#program main
+#semantic main.sceneTex : TEXUNIT0
+#semantic main.blurTex : TEXUNIT1
+#semantic main.gammaTex : TEXUNIT2
+#semantic main.vignetteTex : TEXUNIT3
+#semantic main.blurAmount
+#semantic main.windowSize
+#semantic main.exposure
+#var float4 In.wpos : : : 0 : 0
+#var float3 In.tex0 : $vin.TEX0 : TEX0 : 0 : 1
+#var float3 In.tex1 : $vin.TEX1 : TEX1 : 0 : 1
+#var float4 In.col0 : : : 0 : 0
+#var samplerRECT sceneTex : TEXUNIT0 : texunit 0 : 1 : 1
+#var samplerRECT blurTex : TEXUNIT1 : texunit 1 : 2 : 1
+#var sampler1D gammaTex : TEXUNIT2 : texunit 2 : 3 : 1
+#var samplerRECT vignetteTex : TEXUNIT3 : texunit 3 : 4 : 1
+#var float blurAmount : : c[1] : 5 : 1
+#var float4 windowSize : : : 6 : 0
+#var float exposure : : c[2] : 7 : 1
+#var half4 main : $vout.COL : COL : -1 : 1
+#const c[0] = 1 0
+PARAM c[3] = { { 1, 0 },
+ program.local[1..2] };
+TEMP R0;
+TEMP R1;
+TEMP R2;
+TEMP R3;
+TEMP R4;
+ADD R0.xy, fragment.texcoord[1], c[0].yxzw;
+ADD R1.xy, fragment.texcoord[1], c[0].x;
+TEX R1.xyz, R1, texture[1], RECT;
+TEX R0.xyz, R0, texture[1], RECT;
+ADD R3.xyz, R1, -R0;
+FRC R4.xy, fragment.texcoord[1];
+ADD R1.xy, fragment.texcoord[1], c[0];
+TEX R2.xyz, fragment.texcoord[1], texture[1], RECT;
+TEX R1.xyz, R1, texture[1], RECT;
+ADD R1.xyz, R1, -R2;
+MAD R1.xyz, R4.x, R1, R2;
+MAD R0.xyz, R4.x, R3, R0;
+ADD R2.xyz, R0, -R1;
+TEX R0.xyz, fragment.texcoord[0], texture[0], RECT;
+MAD R1.xyz, R4.y, R2, R1;
+ADD R1.xyz, R1, -R0;
+MAD R1.xyz, R1, c[1].x, R0;
+MUL R1.xyz, R1, c[2].x;
+TEX R0.xyz, fragment.texcoord[0], texture[3], RECT;
+MUL R0.xyz, R1, R0;
+TEX result.color.x, R0, texture[2], 1D;
+TEX R0.x, R0.y, texture[2], 1D;
+TEX R1.x, R0.z, texture[2], 1D;
+MOV result.color.y, R0.x;
+MOV result.color.z, R1.x;
+MOV result.color.w, c[0].x;
+END
+# 26 instructions, 5 R-regs
diff --git a/src/demos/hdr/shaders/cg/hdr.cg b/src/demos/hdr/shaders/cg/hdr.cg
new file mode 100755
index 0000000..3a0cafd
--- /dev/null
+++ b/src/demos/hdr/shaders/cg/hdr.cg
@@ -0,0 +1,130 @@
+/*
+ Cg functions to decode and filter textures in Radiance (RGBE) high dynamic range format
+ sgg 2/15/02
+
+ http://www.graphics.cornell.edu/~bjw/rgbe.html
+*/
+
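+// Precision switch: the disabled #if 0 branch would use full float types and
+// the f4tex* lookups; the active branch uses half precision and the h4tex*
+// lookups.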
+#if 0
+typedef float4 vec4;
+typedef float3 vec3;
+typedef float2 vec2;
+typedef float real;
+#define texRECT f4texRECT
+#define texCUBE f4texCUBE
+#else
+typedef half4 vec4;
+typedef half3 vec3;
+typedef half2 vec2;
+typedef half real;
+#define texRECT h4texRECT
+#define texCUBE h4texCUBE
+#endif
+
+struct fragin
+{
+ float4 wpos : WPOS;
+ float3 tex0 : TEXCOORD0;
+ float3 tex1 : TEXCOORD1;
+ float4 col0 : COLOR0;
+};
+
+// Lookup in RGBE-encoded rectangle texture
+vec3 texRECT_RGBE(uniform samplerRECT tex, float2 t)
+{
+ vec4 rgbe = texRECT(tex, t);
+ real e = (rgbe[3] * 255) - 128;
+ return rgbe.xyz * exp2(e);
+}
+
+// Lookup in RGBE-encoded cube map texture
+vec3 texCUBE_RGBE(uniform samplerCUBE tex, float3 t)
+{
+ vec4 rgbe = texCUBE(tex, t);
+ real e = (rgbe[3] * 255) - 128;
+ return rgbe.xyz * exp2(e);
+}
+
+// Lookup in RGBE-encoded rectangle texture with filtering
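+// (Filtering is done manually because RGBE texels cannot be filtered in
+// hardware: interpolating the shared-exponent byte would give wrong results,
+// so the four decoded neighbours are blended here instead.)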
+vec3 texRECT_RGBE_Bilinear(uniform samplerRECT tex, half2 t)
+{
+ float2 f = frac(t);
+ vec3 t0 = texRECT_RGBE(tex, t);
+ vec3 t1 = texRECT_RGBE(tex, t + half2(1,0) );
+ vec3 t2 = lerp(t0, t1, f[0]);
+ t0 = texRECT_RGBE(tex, t + half2(0,1) );
+ t1 = texRECT_RGBE(tex, t + half2(1,1) );
+ t0 = lerp(t0, t1, f[0]);
+ t0 = lerp(t2, t0, f[1]);
+ return t0;
+}
+
+// Lookup in cubemap encoded as two HILO cube maps
+vec3 texCUBE_hilo(uniform samplerCUBE rg_tex : TEXUNIT0, uniform samplerCUBE b_tex : TEXUNIT1, float3 t)
+{
+ vec3 c;
+ c.xy = texCUBE(rg_tex, t).xy;
+ c.z = texCUBE(b_tex, t).x;
+// c = c * c;
+ return c;
+}
+
+// Lookup in rectangle texture encoded as two HILO cube maps
+vec3 texRECT_hilo(uniform samplerRECT rg_tex : TEXUNIT0, uniform samplerRECT b_tex : TEXUNIT1, float2 t)
+{
+ vec3 c;
+ c.xy = texRECT(rg_tex, t).xy;
+ c.z = texRECT(b_tex, t).x;
+ return c;
+}
+
+// bilinear lookup in float texture
+vec4 texRECT_bilinear(uniform samplerRECT tex, half2 t)
+{
+ float2 f = frac(t);
+ vec4 t0 = texRECT(tex, t);
+ vec4 t1 = texRECT(tex, t + half2(1,0) );
+ vec4 t2 = lerp(t0, t1, f[0]);
+ t0 = texRECT(tex, t + half2(0,1) );
+ t1 = texRECT(tex, t + half2(1,1) );
+ t0 = lerp(t0, t1, f[0]);
+ t0 = lerp(t2, t0, f[1]);
+ return t0;
+}
+
+// applying vignetting based on window coordinates
+void vignette(inout float3 c, float4 wpos, const float2 win_bias, const float2 win_scale)
+{
+ // convert window coord to [-1, 1] range
+ wpos.xy = (wpos.xy - win_bias) * win_scale;
+ // calculate distance from origin
+ float r = length(wpos.xy);
+ r = 1.0 - smoothstep(0.8, 1.5, r);
+ c = c * r;
+}
+
+// refraction function from Renderman spec
+// I = incident direction, N = normal, eta = relative index of refraction
+half3 my_refract(half3 I, half3 N, half eta)
+{
+ half IdotN = dot(I,N);
+ half k = 1 - eta*eta*(1 - IdotN*IdotN);
+
+ return eta*I - (eta*IdotN + sqrt(k))*N;
+}
+
+// fresnel approximation
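+// (bias + scale * (1 - dot(I, N))^power, a Schlick-style approximation)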
+half my_fresnel(half3 I, half3 N, half power, half scale, half bias)
+{
+ return bias + (pow(max(0.0, 1.0 - dot(I, N)), power) * scale);
+}
+
+// transform a direction vector by a 4x4 matrix
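+// (only the upper-left 3x3 of m is used, so translation is ignored)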
+float3 transform_dir(float4x4 m, float3 v)
+{
+ float3 o;
+ o.x = dot(v, m._11_12_13);
+ o.y = dot(v, m._21_22_23);
+ o.z = dot(v, m._31_32_33);
+ return o;
+}
diff --git a/src/demos/hdr/shaders/cg/object.cg b/src/demos/hdr/shaders/cg/object.cg
new file mode 100755
index 0000000..a242bc6
--- /dev/null
+++ b/src/demos/hdr/shaders/cg/object.cg
@@ -0,0 +1,30 @@
+// object shader
+
+#include "hdr.cg"
+
+struct v2f
+{
+ float4 HPosition : POSITION;
+ float4 P : TEXCOORD0; // position
+ float3 N : TEXCOORD1; // normal
+ float3 I : TEXCOORD2; // incident vector
+};
+
+half4 main(v2f In,
+ uniform samplerCUBE envMap : TEXUNIT0) : COLOR
+{
+ half3 I = normalize(half3(In.I));
+ half3 N = normalize(half3(In.N));
+
+ half3 R = reflect(I, N);
+// half3 T = refract(I, N, 0.9);
+ half fresnel = my_fresnel(-I, N, 5.0, 0.98, 0.02);
+
+ half3 Creflect = texCUBE(envMap, R).rgb; // lookup reflection in HDR cube map
+// half3 Crefract = texCUBE(envMap, T).rgb; // refraction
+
+// half3 Cout = lerp(Crefract, Creflect, fresnel);
+// return half4(Cout, fresnel*0.5 + 0.5);
+
+ return half4(Creflect * fresnel, 1.0);
+}
diff --git a/src/demos/hdr/shaders/cg/object_hilo.cg b/src/demos/hdr/shaders/cg/object_hilo.cg
new file mode 100755
index 0000000..88a3348
--- /dev/null
+++ b/src/demos/hdr/shaders/cg/object_hilo.cg
@@ -0,0 +1,31 @@
+// object shader
+
+#include "hdr.cg"
+
+struct v2f
+{
+ float4 HPosition : POSITION;
+ float4 P : TEXCOORD0; // position
+ float3 N : TEXCOORD1; // normal
+ float3 I : TEXCOORD2; // incident vector
+};
+
+half4 main(v2f In,
+ uniform samplerCUBE envMap_rg : TEXUNIT0,
+ uniform samplerCUBE envMap_b : TEXUNIT1) : COLOR
+{
+ half3 I = normalize(half3(In.I));
+ half3 N = normalize(half3(In.N));
+
+ half3 R = reflect(I, N);
+// half3 T = refract(I, N, 0.9);
+ half fresnel = my_fresnel(-I, N, 5.0, 0.98, 0.02);
+
+ half3 Creflect = texCUBE_hilo(envMap_rg, envMap_b, R).rgb; // lookup reflection in HDR cube map
+// half3 Crefract = texCUBE_hilo(envMap_rg, envMap_b, T).rgb; // refraction
+
+// half3 Cout = lerp(Crefract, Creflect, fresnel);
+// return half4(Cout, 1.0);
+
+ return half4(Creflect * fresnel, 1.0);
+}
diff --git a/src/demos/hdr/shaders/cg/object_vp.cg b/src/demos/hdr/shaders/cg/object_vp.cg
new file mode 100755
index 0000000..895dc28
--- /dev/null
+++ b/src/demos/hdr/shaders/cg/object_vp.cg
@@ -0,0 +1,43 @@
+#include "hdr.cg"
+
+// application to vertex shader
+struct a2v
+{
+ float4 Position : POSITION;
+ float4 Normal : NORMAL;
+};
+
+// vertex shader to fragment shader
+struct v2f
+{
+ float4 HPosition : POSITION;
+ float4 P : TEXCOORD0; // position
+ float3 N : TEXCOORD1; // normal
+ float3 I : TEXCOORD2; // incident vector
+};
+
+v2f main(a2v In,
+ uniform float4x4 modelViewProj,
+ uniform float4x4 model,
+ uniform float3 eyePos
+ )
+{
+ v2f Out;
+
+ // transform position
+ float4 P = mul(model, In.Position);
+
+ // transform normal
+ float3 N = transform_dir(model, In.Normal.xyz);
+ N = normalize(N);
+
+ // calculate incident vector
+ float3 I = P.xyz - eyePos;
+
+ Out.P = P;
+ Out.N = N;
+ Out.I = I;
+
+ Out.HPosition = mul(modelViewProj, In.Position);
+ return Out;
+}
diff --git a/src/demos/hdr/shaders/cg/shrink.cg b/src/demos/hdr/shaders/cg/shrink.cg
new file mode 100755
index 0000000..7e01947
--- /dev/null
+++ b/src/demos/hdr/shaders/cg/shrink.cg
@@ -0,0 +1,17 @@
+// downsample float image by half
+
+#include "hdr.cg"
+
+half4 main(fragin In,
+ uniform samplerRECT sceneTex : TEXUNIT0
+ ) : COLOR
+{
+ // note: these texcoords could instead be calculated in the vertex shader
+ half4 c;
+ c = texRECT(sceneTex, In.tex0.xy);
+ c = c + texRECT(sceneTex, In.tex0.xy + float2(1, 0));
+ c = c + texRECT(sceneTex, In.tex0.xy + float2(0, 1));
+ c = c + texRECT(sceneTex, In.tex0.xy + float2(1, 1));
+ c = c * 0.25;
+ return c;
+}
diff --git a/src/demos/hdr/shaders/cg/skybox.cg b/src/demos/hdr/shaders/cg/skybox.cg
new file mode 100755
index 0000000..5ca6e6f
--- /dev/null
+++ b/src/demos/hdr/shaders/cg/skybox.cg
@@ -0,0 +1,8 @@
+#include "hdr.cg"
+
+half4 main(fragin In,
+ uniform samplerCUBE envMap : TEXUNIT0) : COLOR
+{
+ half3 c = texCUBE(envMap, In.tex0).rgb;
+ return half4(c, 1.0);
+}
diff --git a/src/demos/hdr/shaders/cg/skybox_hilo.cg b/src/demos/hdr/shaders/cg/skybox_hilo.cg
new file mode 100755
index 0000000..d7392bf
--- /dev/null
+++ b/src/demos/hdr/shaders/cg/skybox_hilo.cg
@@ -0,0 +1,9 @@
+#include "hdr.cg"
+
+half4 main(fragin In,
+ uniform samplerCUBE envMap_rg : TEXUNIT0,
+ uniform samplerCUBE envMap_b : TEXUNIT1) : COLOR
+{
+ half3 c = texCUBE_hilo(envMap_rg, envMap_b, In.tex0).rgb;
+ return half4(c, 1.0);
+}
diff --git a/src/demos/hdr/shaders/cg/tonemap.cg b/src/demos/hdr/shaders/cg/tonemap.cg
new file mode 100755
index 0000000..c3d218f
--- /dev/null
+++ b/src/demos/hdr/shaders/cg/tonemap.cg
@@ -0,0 +1,37 @@
+// Tone mapping pass
+
+#include "hdr.cg"
+
+half4 main(fragin In,
+ uniform samplerRECT sceneTex : TEXUNIT0,
+ uniform samplerRECT blurTex : TEXUNIT1,
+ uniform sampler1D gammaTex : TEXUNIT2,
+ uniform samplerRECT vignetteTex : TEXUNIT3,
+ uniform float blurAmount,
+ uniform float4 windowSize,
+ uniform float exposure
+ ) : COLOR
+{
+ // sum original and blurred image
+ half3 c = lerp(texRECT(sceneTex, In.tex0.xy), texRECT_bilinear(blurTex, In.tex1.xy), blurAmount).xyz;
+
+ // exposure
+ c = c * half(exposure);
+
+ // vignette effect (makes brightness drop off with distance from center)
+// vignette(c, In.wpos, windowSize.xy, windowSize.zw);
+ c = c * texRECT(vignetteTex, In.tex0.xy).rgb;
+
+ // gamma correction
+#if 0
+ // use math
+ c = pow(c, 1.0 / 2.2);
+#else
+ // use lut
+ c.r = h1tex1D(gammaTex, c.r);
+ c.g = h1tex1D(gammaTex, c.g);
+ c.b = h1tex1D(gammaTex, c.b);
+#endif
+
+ return half4(c, 1.0);
+}