author    Sven Gothel <[email protected]>  2015-03-21 23:01:12 +0100
committer Sven Gothel <[email protected]>  2015-03-21 23:01:12 +0100
commit    0c5c4be020c2d55540058a49b2a879f46d5a1e13 (patch)
tree      00f84c2ca18cc233b826014094b9cad0769a3ea5 /LibOVR/Src/Util/Util_Render_Stereo.h
parent    cbbd775b6c754927632c333ff01424a0d2048c7c (diff)
parent    e490c3c7f7bb5461cfa78a214827aa534fb43a3e (diff)
Merge branch 'vanilla_0.4.4' and resolve conflicts
TODO: Validate for removed patches due to relocation

Resolved Conflicts:
	LibOVR/Src/Kernel/OVR_ThreadsWinAPI.cpp
	LibOVR/Src/OVR_Linux_HMDDevice.cpp
	LibOVR/Src/OVR_OSX_HMDDevice.cpp
	LibOVR/Src/OVR_Profile.cpp
	LibOVR/Src/OVR_Sensor2Impl.cpp
	LibOVR/Src/OVR_SensorFusion.cpp
	LibOVR/Src/OVR_SensorImpl.cpp
	LibOVR/Src/OVR_Win32_DeviceStatus.cpp
	LibOVR/Src/OVR_Win32_HIDDevice.cpp
	LibOVR/Src/OVR_Win32_HIDDevice.h
	LibOVR/Src/OVR_Win32_HMDDevice.cpp
Diffstat (limited to 'LibOVR/Src/Util/Util_Render_Stereo.h')
-rw-r--r--  LibOVR/Src/Util/Util_Render_Stereo.h  64
1 file changed, 33 insertions(+), 31 deletions(-)
diff --git a/LibOVR/Src/Util/Util_Render_Stereo.h b/LibOVR/Src/Util/Util_Render_Stereo.h
index 326059e..2ac863c 100644
--- a/LibOVR/Src/Util/Util_Render_Stereo.h
+++ b/LibOVR/Src/Util/Util_Render_Stereo.h
@@ -1,21 +1,20 @@
/************************************************************************************
-PublicHeader: OVR.h
Filename : Util_Render_Stereo.h
Content : Sample stereo rendering configuration classes.
Created : October 22, 2012
Authors : Michael Antonov, Tom Forsyth
-Copyright : Copyright 2014 Oculus VR, Inc. All Rights reserved.
+Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
-Licensed under the Oculus VR Rift SDK License Version 3.1 (the "License");
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
-http://www.oculusvr.com/licenses/LICENSE-3.1
+http://www.oculusvr.com/licenses/LICENSE-3.2
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
@@ -29,13 +28,9 @@ limitations under the License.
#define OVR_Util_Render_Stereo_h
#include "../OVR_Stereo.h"
+#include "../Tracking/Tracking_SensorStateReader.h"
-
-namespace OVR {
-
-class SensorFusion;
-
-namespace Util { namespace Render {
+namespace OVR { namespace Util { namespace Render {
@@ -333,20 +328,25 @@ struct DistortionMeshVertexData
Vector2f TanEyeAnglesB;
};
+// If you just want a single point on the screen transformed.
+DistortionMeshVertexData DistortionMeshMakeVertex ( Vector2f screenNDC,
+ bool rightEye,
+ const HmdRenderInfo &hmdRenderInfo,
+ const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC );
-void DistortionMeshCreate ( DistortionMeshVertexData **ppVertices, UInt16 **ppTriangleListIndices,
+void DistortionMeshCreate ( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
int *pNumVertices, int *pNumTriangles,
const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo );
-// Generate distortion mesh for a eye. This version requires less data then stereoParms, supporting
-// dynamic change in render target viewport.
-void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, UInt16 **ppTriangleListIndices,
+// Generate distortion mesh for a eye.
+// This version requires less data then stereoParms, supporting dynamic change in render target viewport.
+void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
int *pNumVertices, int *pNumTriangles,
bool rightEye,
const HmdRenderInfo &hmdRenderInfo,
const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC );
-void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, UInt16 *pTriangleMeshIndices );
+void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices );
//-----------------------------------------------------------------------------------
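For orientation, here is a minimal sketch of how the create/destroy pair above is typically driven. It assumes stereoParams and hmdRenderInfo have already been filled in by the SDK's stereo/HMD configuration code, which lies outside this header; the helper name and include path are purely illustrative.

    #include <stdint.h>
    #include "Util/Util_Render_Stereo.h"   // path relative to LibOVR/Src

    using namespace OVR;
    using namespace OVR::Util::Render;

    // Illustrative helper: build one eye's distortion mesh, use it, release it.
    void BuildAndReleaseDistortionMesh(const StereoEyeParams& stereoParams,
                                       const HmdRenderInfo&   hmdRenderInfo)
    {
        DistortionMeshVertexData* pVertices = NULL;
        uint16_t*                 pIndices  = NULL;  // uint16_t replaces UInt16 in this change
        int numVertices  = 0;
        int numTriangles = 0;

        DistortionMeshCreate(&pVertices, &pIndices,
                             &numVertices, &numTriangles,
                             stereoParams, hmdRenderInfo);

        // ... upload pVertices / pIndices (a triangle list, 3 indices per triangle)
        //     to GPU vertex/index buffers ...

        DistortionMeshDestroy(pVertices, pIndices);
    }

The new DistortionMeshMakeVertex entry point covers the case where only a single screen point needs the same transform, taking the per-eye distortion and eyeToSourceNDC data directly.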
@@ -368,17 +368,17 @@ struct HeightmapMeshVertexData
};
-void HeightmapMeshCreate ( HeightmapMeshVertexData **ppVertices, UInt16 **ppTriangleListIndices,
+void HeightmapMeshCreate ( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
int *pNumVertices, int *pNumTriangles,
const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo );
// Generate heightmap mesh for a eye. This version requires less data then stereoParms, supporting
// dynamic change in render target viewport.
-void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, UInt16 **ppTriangleListIndices,
+void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
int *pNumVertices, int *pNumTriangles, bool rightEye,
const HmdRenderInfo &hmdRenderInfo, const ScaleAndOffset2D &eyeToSourceNDC );
-void HeightmapMeshDestroy ( HeightmapMeshVertexData *pVertices, UInt16 *pTriangleMeshIndices );
+void HeightmapMeshDestroy ( HeightmapMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices );
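The heightmap functions follow the same pattern; the leaner per-eye overload is the one that tolerates a render-target viewport change without rebuilding StereoEyeParams. A brief sketch, again assuming hmdRenderInfo and eyeToSourceNDC come from the SDK's configuration step:

    // Right-eye heightmap mesh via the per-eye overload (sketch only).
    HeightmapMeshVertexData* pVertices = NULL;
    uint16_t*                pIndices  = NULL;
    int numVertices = 0, numTriangles = 0;

    HeightmapMeshCreate(&pVertices, &pIndices,
                        &numVertices, &numTriangles,
                        true /* rightEye */,
                        hmdRenderInfo, eyeToSourceNDC);

    // ... consume the mesh ...

    HeightmapMeshDestroy(pVertices, pIndices);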
@@ -411,8 +411,8 @@ PredictionValues PredictionGetDeviceValues ( const HmdRenderInfo &hmdRenderInfo,
// (which may have been computed later on, and thus is more accurate), and this
// will return the matrix to pass to the timewarp distortion shader.
// TODO: deal with different handedness?
-Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&eyeViewAdjust );
-Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&eyeViewAdjust );
+Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset );
+Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset );
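A sketch of the renamed parameter's role, with all three matrices assumed to be built by the application's own camera and eye-offset code:

    // renderedViewFromWorld : view matrix the frame was actually rendered with
    // predictedViewFromWorld: view matrix rebuilt from the later, more accurate pose
    // hmdToEyeViewOffset    : per-eye offset matrix (renamed from eyeViewAdjust here)
    Matrix4f ComputeEyeTimewarpMatrix(const Matrix4f& renderedViewFromWorld,
                                      const Matrix4f& predictedViewFromWorld,
                                      const Matrix4f& hmdToEyeViewOffset)
    {
        // The result is what gets uploaded to the timewarp distortion shader.
        return TimewarpComputePoseDelta(renderedViewFromWorld,
                                        predictedViewFromWorld,
                                        hmdToEyeViewOffset);
    }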
@@ -429,12 +429,16 @@ public:
// The only reliable time in most engines is directly after the frame-present and GPU flush-and-wait.
// This call should be done right after that to give this system the timing info it needs.
void AfterPresentAndFlush(double timeNow);
+ // But some engines queue up the frame-present and only later find out when it actually happened.
+ // They should call these two at those times.
+ void AfterPresentWithoutFlush();
+ void AfterPresentFinishes(double timeNow);
// The "average" time the rendered frame will show up,
// and the predicted pose of the HMD at that time.
// You usually only need to call one of these functions.
double GetViewRenderPredictionTime();
- Transformf GetViewRenderPredictionPose(SensorFusion &sfusion);
+ bool GetViewRenderPredictionPose(Tracking::SensorStateReader* reader, Posef& transform);
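A sketch of the per-frame call order described in the comments above. The enclosing class is assumed to be this header's TimewarpMachine (its declaration sits outside these hunks), and 'reader' is a Tracking::SensorStateReader owned by the caller; the same includes and using-declarations as the earlier sketch are assumed.

    void OnFramePresented(TimewarpMachine& tw,
                          Tracking::SensorStateReader* reader,
                          double timeNow)
    {
        // Engines that flush and wait right after present call this directly:
        tw.AfterPresentAndFlush(timeNow);

        // Engines that only queue the present would instead call, in order:
        //     tw.AfterPresentWithoutFlush();            // right after queueing
        //     tw.AfterPresentFinishes(actualFlipTime);  // once the real flip time is known

        // Predicted pose to render the next frame with; note the new bool-return style.
        Posef renderPose;
        if (tw.GetViewRenderPredictionPose(reader, renderPose))
        {
            // ... build the per-eye view matrices from renderPose ...
        }
    }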
// Timewarp prediction functions. You usually only need to call one of these three sets of functions.
@@ -443,14 +447,13 @@ public:
double GetVisiblePixelTimeStart();
double GetVisiblePixelTimeEnd();
// Predicted poses of the HMD at those first and last pixels.
- Transformf GetPredictedVisiblePixelPoseStart(SensorFusion &sfusion);
- Transformf GetPredictedVisiblePixelPoseEnd (SensorFusion &sfusion);
+ bool GetPredictedVisiblePixelPoseStart(Tracking::SensorStateReader* reader, Posef& transform);
+ bool GetPredictedVisiblePixelPoseEnd(Tracking::SensorStateReader* reader, Posef& transform);
// The delta matrices to feed to the timewarp distortion code,
// given the pose that was used for rendering.
// (usually the one returned by GetViewRenderPredictionPose() earlier)
- Matrix4f GetTimewarpDeltaStart(SensorFusion &sfusion, Transformf const &renderedPose);
- Matrix4f GetTimewarpDeltaEnd (SensorFusion &sfusion, Transformf const &renderedPose);
-
+ bool GetTimewarpDeltaStart(Tracking::SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform);
+ bool GetTimewarpDeltaEnd(Tracking::SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform);
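The same SensorFusion-to-SensorStateReader migration applies here: the pose and delta getters now return a bool and write their result through an out-parameter. A fragment continuing the previous sketch ('tw', 'reader' and 'renderPose' as above):

    // Start/end timewarp matrices for the visible-pixel interval (sketch only).
    Matrix4f deltaStart, deltaEnd;
    bool ok = tw.GetTimewarpDeltaStart(reader, renderPose, deltaStart)
           && tw.GetTimewarpDeltaEnd(reader, renderPose, deltaEnd);
    if (ok)
    {
        // ... upload deltaStart / deltaEnd to the distortion shader ...
    }
    // A false return presumably means no usable tracking state was available.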
// Just-In-Time distortion aims to delay the second sensor reading & distortion
// until the very last moment to improve prediction. However, it is a little scary,
@@ -465,26 +468,25 @@ public:
bool JustInTime_NeedDistortionTimeMeasurement() const;
void JustInTime_BeforeDistortionTimeMeasurement(double timeNow);
void JustInTime_AfterDistortionTimeMeasurement(double timeNow);
-
+ double JustInTime_AverageDistortionTime(); // Just for profiling - use JustInTime_GetDistortionWaitUntilTime() for functionality.
private:
-
bool VsyncEnabled;
HmdRenderInfo RenderInfo;
PredictionValues CurrentPredictionValues;
- enum { NumDistortionTimes = 10 };
+ enum { NumDistortionTimes = 100 };
int DistortionTimeCount;
double DistortionTimeCurrentStart;
float DistortionTimes[NumDistortionTimes];
float DistortionTimeAverage;
// Pose at which last time the eye was rendered.
- Transformf EyeRenderPoses[2];
+ Posef EyeRenderPoses[2];
// Absolute time of the last present+flush
double LastFramePresentFlushTime;
- // Seconds between presentflushes
+ // Seconds between present+flushes
float PresentFlushToPresentFlushSeconds;
// Predicted absolute time of the next present+flush
double NextFramePresentFlushTime;
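Finally, a hedged sketch of how the just-in-time distortion hooks above are meant to bracket the engine's own distortion pass; Timer() and RenderDistortionPass() are illustrative stand-ins for whatever clock and draw call the engine already has, and 'tw' is the TimewarpMachine from the earlier sketches.

    if (tw.JustInTime_NeedDistortionTimeMeasurement())
    {
        // Measure this particular distortion pass so the machine can refine its
        // average (the DistortionTimes buffer now keeps 100 samples instead of 10).
        tw.JustInTime_BeforeDistortionTimeMeasurement(Timer());
        RenderDistortionPass();
        tw.JustInTime_AfterDistortionTimeMeasurement(Timer());
    }
    else
    {
        RenderDistortionPass();
    }
    // JustInTime_AverageDistortionTime() is for profiling only; as the comment in
    // the diff notes, actual scheduling should use JustInTime_GetDistortionWaitUntilTime().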