Diffstat (limited to 'LibOVR/Src/Util/Util_Render_Stereo.cpp')
-rw-r--r-- | LibOVR/Src/Util/Util_Render_Stereo.cpp | 260
1 file changed, 148 insertions(+), 112 deletions(-)
diff --git a/LibOVR/Src/Util/Util_Render_Stereo.cpp b/LibOVR/Src/Util/Util_Render_Stereo.cpp
index 1389462..b5b9b8a 100644
--- a/LibOVR/Src/Util/Util_Render_Stereo.cpp
+++ b/LibOVR/Src/Util/Util_Render_Stereo.cpp
@@ -5,16 +5,16 @@ Content : Stereo rendering configuration implementation
 Created : October 22, 2012
 Authors : Michael Antonov, Andrew Reisse, Tom Forsyth
 
-Copyright : Copyright 2014 Oculus VR, Inc. All Rights reserved.
+Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
 
-Licensed under the Oculus VR Rift SDK License Version 3.1 (the "License");
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
 you may not use the Oculus VR Rift SDK except in compliance with the License,
 which is provided at the time of installation or download, or which
 otherwise accompanies this software in either electronic or hard copy form.
 
 You may obtain a copy of the License at
 
-http://www.oculusvr.com/licenses/LICENSE-3.1
+http://www.oculusvr.com/licenses/LICENSE-3.2
 
 Unless required by applicable law or agreed to in writing, the Oculus VR SDK
 distributed under the License is distributed on an "AS IS" BASIS,
@@ -38,21 +38,21 @@ char const* GetDebugNameEyeCupType ( EyeCupType eyeCupType )
 {
     switch ( eyeCupType )
     {
-    case EyeCup_DK1A: return "DK1 A"; break;
-    case EyeCup_DK1B: return "DK1 B"; break;
-    case EyeCup_DK1C: return "DK1 C"; break;
-    case EyeCup_DKHD2A: return "DKHD2 A"; break;
-    case EyeCup_OrangeA: return "Orange A"; break;
-    case EyeCup_RedA: return "Red A"; break;
-    case EyeCup_PinkA: return "Pink A"; break;
-    case EyeCup_BlueA: return "Blue A"; break;
-    case EyeCup_Delilah1A: return "Delilah 1 A"; break;
-    case EyeCup_Delilah2A: return "Delilah 2 A"; break;
-    case EyeCup_JamesA: return "James A"; break;
-    case EyeCup_SunMandalaA: return "Sun Mandala A"; break;
-    case EyeCup_DK2A: return "DK2 A"; break;
-    case EyeCup_LAST: return "LAST"; break;
-    default: OVR_ASSERT ( false ); return "Error"; break;
+    case EyeCup_DK1A: return "DK1 A";
+    case EyeCup_DK1B: return "DK1 B";
+    case EyeCup_DK1C: return "DK1 C";
+    case EyeCup_DKHD2A: return "DKHD2 A";
+    case EyeCup_OrangeA: return "Orange A";
+    case EyeCup_RedA: return "Red A";
+    case EyeCup_PinkA: return "Pink A";
+    case EyeCup_BlueA: return "Blue A";
+    case EyeCup_Delilah1A: return "Delilah 1 A";
+    case EyeCup_Delilah2A: return "Delilah 2 A";
+    case EyeCup_JamesA: return "James A";
+    case EyeCup_SunMandalaA: return "Sun Mandala A";
+    case EyeCup_DK2A: return "DK2 A";
+    case EyeCup_LAST: return "LAST";
+    default: OVR_ASSERT ( false ); return "Error";
     }
 }
 
@@ -60,17 +60,17 @@ char const* GetDebugNameHmdType ( HmdTypeEnum hmdType )
 {
     switch ( hmdType )
    {
-    case HmdType_None: return "None"; break;
-    case HmdType_DK1: return "DK1"; break;
-    case HmdType_DKProto: return "DK1 prototype"; break;
-    case HmdType_DKHDProto: return "DK HD prototype 1"; break;
-    case HmdType_DKHDProto566Mi: return "DK HD prototype 566 Mi"; break;
-    case HmdType_DKHD2Proto: return "DK HD prototype 585"; break;
-    case HmdType_CrystalCoveProto: return "Crystal Cove"; break;
-    case HmdType_DK2: return "DK2"; break;
-    case HmdType_Unknown: return "Unknown"; break;
-    case HmdType_LAST: return "LAST"; break;
-    default: OVR_ASSERT ( false ); return "Error"; break;
+    case HmdType_None: return "None";
+    case HmdType_DK1: return "DK1";
+    case HmdType_DKProto: return "DK1 prototype";
+    case HmdType_DKHDProto: return "DK HD prototype 1";
+    case HmdType_DKHDProto566Mi: return "DK HD prototype 566 Mi";
+    case HmdType_DKHD2Proto: return "DK HD prototype 585";
+    case HmdType_CrystalCoveProto: return "Crystal Cove";
+    case HmdType_DK2: return "DK2";
+    case HmdType_Unknown: return "Unknown";
+    case HmdType_LAST: return "LAST";
+    default: OVR_ASSERT ( false ); return "Error";
     }
 }
 
@@ -206,7 +206,7 @@ static StereoEyeParams CalculateStereoEyeParamsInternal ( StereoEye eyeType, Hmd
     StereoEyeParams result;
 
     result.Eye = eyeType;
-    result.ViewAdjust = Matrix4f::Translation(virtualCameraOffset);
+    result.HmdToEyeViewOffset = Matrix4f::Translation(virtualCameraOffset);
     result.Distortion = distortion;
     result.DistortionViewport = distortedViewport;
     result.Fov = fov;
@@ -790,6 +790,90 @@ static const int DMA_NumTrisPerEye = (DMA_GridSize)*(DMA_GridSize)*2;
 
 
 
+DistortionMeshVertexData DistortionMeshMakeVertex ( Vector2f screenNDC,
+                                                    bool rightEye,
+                                                    const HmdRenderInfo &hmdRenderInfo,
+                                                    const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC )
+{
+    DistortionMeshVertexData result;
+
+    float xOffset = 0.0f;
+    if (rightEye)
+    {
+        xOffset = 1.0f;
+    }
+
+    Vector2f tanEyeAnglesR, tanEyeAnglesG, tanEyeAnglesB;
+    TransformScreenNDCToTanFovSpaceChroma ( &tanEyeAnglesR, &tanEyeAnglesG, &tanEyeAnglesB,
+                                            distortion, screenNDC );
+
+    result.TanEyeAnglesR = tanEyeAnglesR;
+    result.TanEyeAnglesG = tanEyeAnglesG;
+    result.TanEyeAnglesB = tanEyeAnglesB;
+
+    HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type;
+    switch ( shutterType )
+    {
+    case HmdShutter_Global:
+        result.TimewarpLerp = 0.0f;
+        break;
+    case HmdShutter_RollingLeftToRight:
+        // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0
+        result.TimewarpLerp = screenNDC.x * 0.25f + 0.25f;
+        if (rightEye)
+        {
+            result.TimewarpLerp += 0.5f;
+        }
+        break;
+    case HmdShutter_RollingRightToLeft:
+        // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0
+        result.TimewarpLerp = 0.75f - screenNDC.x * 0.25f;
+        if (rightEye)
+        {
+            result.TimewarpLerp -= 0.5f;
+        }
+        break;
+    case HmdShutter_RollingTopToBottom:
+        // Retrace is top to bottom on both eyes at the same time.
+        result.TimewarpLerp = screenNDC.y * 0.5f + 0.5f;
+        break;
+    default: OVR_ASSERT ( false ); break;
+    }
+
+    // When does the fade-to-black edge start? Chosen heuristically.
+    const float fadeOutBorderFractionTexture = 0.3f;
+    const float fadeOutBorderFractionTextureInnerEdge = 0.1f;
+    const float fadeOutBorderFractionScreen = 0.1f;
+
+    // Fade out at texture edges.
+    // The furthest out will be the blue channel, because of chromatic aberration (true of any standard lens)
+    Vector2f sourceTexCoordBlueNDC = TransformTanFovSpaceToRendertargetNDC ( eyeToSourceNDC, tanEyeAnglesB );
+    if (rightEye)
+    {
+        // The inner edge of the eye texture is usually much more magnified, because it's right against the middle of the screen, not the FOV edge.
+        // So we want a different scaling factor for that. This code flips the texture NDC so that +1.0 is the inner edge
+        sourceTexCoordBlueNDC.x = -sourceTexCoordBlueNDC.x;
+    }
+    float edgeFadeIn = ( 1.0f / fadeOutBorderFractionTextureInnerEdge ) * ( 1.0f - sourceTexCoordBlueNDC.x );    // Inner
+    edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f + sourceTexCoordBlueNDC.x ) );  // Outer
+    edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f - sourceTexCoordBlueNDC.y ) );  // Upper
+    edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f + sourceTexCoordBlueNDC.y ) );  // Lower
+
+    // Also fade out at screen edges. Since this is in pixel space, no need to do inner specially.
+    float edgeFadeInScreen = ( 1.0f / fadeOutBorderFractionScreen ) *
+                             ( 1.0f - Alg::Max ( Alg::Abs ( screenNDC.x ), Alg::Abs ( screenNDC.y ) ) );
+    edgeFadeIn = Alg::Min ( edgeFadeInScreen, edgeFadeIn );
+
+    // Note - this is NOT clamped negatively.
+    // For rendering methods that interpolate over a coarse grid, we need the values to go negative for correct intersection with zero.
+    result.Shade = Alg::Min ( edgeFadeIn, 1.0f );
+    result.ScreenPosNDC.x = 0.5f * screenNDC.x - 0.5f + xOffset;
+    result.ScreenPosNDC.y = -screenNDC.y;
+
+    return result;
+}
+
+
 void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices )
 {
     OVR_FREE ( pVertices );
@@ -844,20 +928,9 @@ void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, uint16_t **ppT
         return;
     }
 
-    // When does the fade-to-black edge start? Chosen heuristically.
-    const float fadeOutBorderFraction = 0.075f;
 
     // Populate vertex buffer info
-    float xOffset = 0.0f;
-    float uOffset = 0.0f;
-    OVR_UNUSED(uOffset);
-
-    if (rightEye)
-    {
-        xOffset = 1.0f;
-        uOffset = 0.5f;
-    }
 
     // First pass - build up raw vertex data.
     DistortionMeshVertexData* pcurVert = *ppVertices;
 
@@ -881,59 +954,8 @@ void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, uint16_t **ppT
             screenNDC.x = Alg::Max ( -1.0f, Alg::Min ( screenNDC.x, 1.0f ) );
             screenNDC.y = Alg::Max ( -1.0f, Alg::Min ( screenNDC.y, 1.0f ) );
 
-            // From those screen positions, we then need (effectively) RGB UVs.
-            // This is the function that actually matters when doing the distortion calculation.
-            Vector2f tanEyeAnglesR, tanEyeAnglesG, tanEyeAnglesB;
-            TransformScreenNDCToTanFovSpaceChroma ( &tanEyeAnglesR, &tanEyeAnglesG, &tanEyeAnglesB,
-                                                    distortion, screenNDC );
-
-            pcurVert->TanEyeAnglesR = tanEyeAnglesR;
-            pcurVert->TanEyeAnglesG = tanEyeAnglesG;
-            pcurVert->TanEyeAnglesB = tanEyeAnglesB;
-
-            HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type;
-            switch ( shutterType )
-            {
-            case HmdShutter_Global:
-                pcurVert->TimewarpLerp = 0.0f;
-                break;
-            case HmdShutter_RollingLeftToRight:
-                // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0
-                pcurVert->TimewarpLerp = screenNDC.x * 0.25f + 0.25f;
-                if (rightEye)
-                {
-                    pcurVert->TimewarpLerp += 0.5f;
-                }
-                break;
-            case HmdShutter_RollingRightToLeft:
-                // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0
-                pcurVert->TimewarpLerp = 0.75f - screenNDC.x * 0.25f;
-                if (rightEye)
-                {
-                    pcurVert->TimewarpLerp -= 0.5f;
-                }
-                break;
-            case HmdShutter_RollingTopToBottom:
-                // Retrace is top to bottom on both eyes at the same time.
-                pcurVert->TimewarpLerp = screenNDC.y * 0.5f + 0.5f;
-                break;
-            default: OVR_ASSERT ( false ); break;
-            }
-
-            // Fade out at texture edges.
-            // The furthest out will be the blue channel, because of chromatic aberration (true of any standard lens)
-            Vector2f sourceTexCoordBlueNDC = TransformTanFovSpaceToRendertargetNDC ( eyeToSourceNDC, tanEyeAnglesB );
-            float edgeFadeIn = ( 1.0f / fadeOutBorderFraction ) *
-                               ( 1.0f - Alg::Max ( Alg::Abs ( sourceTexCoordBlueNDC.x ), Alg::Abs ( sourceTexCoordBlueNDC.y ) ) );
-            // Also fade out at screen edges.
-            float edgeFadeInScreen = ( 2.0f / fadeOutBorderFraction ) *
-                                     ( 1.0f - Alg::Max ( Alg::Abs ( screenNDC.x ), Alg::Abs ( screenNDC.y ) ) );
-            edgeFadeIn = Alg::Min ( edgeFadeInScreen, edgeFadeIn );
-
-            pcurVert->Shade = Alg::Max ( 0.0f, Alg::Min ( edgeFadeIn, 1.0f ) );
-            pcurVert->ScreenPosNDC.x = 0.5f * screenNDC.x - 0.5f + xOffset;
-            pcurVert->ScreenPosNDC.y = -screenNDC.y;
-
+            // From those screen positions, generate the vertex.
+            *pcurVert = DistortionMeshMakeVertex ( screenNDC, rightEye, hmdRenderInfo, distortion, eyeToSourceNDC );
             pcurVert++;
         }
     }
@@ -1071,15 +1093,8 @@ void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, uint16_t **ppTri
     }
 
     // Populate vertex buffer info
-    //float xOffset = 0.0f;   Not currently used.
-    //float uOffset = 0.0f;
-
-    //if (rightEye)
-    //{
-    //    xOffset = 1.0f;
-    //    uOffset = 0.5f;
-    //}
-
+    // float xOffset = (rightEye ? 1.0f : 0.0f);   Currently disabled because its usage is disabled below.
+
     // First pass - build up raw vertex data.
     HeightmapMeshVertexData* pcurVert = *ppVertices;
 
@@ -1277,10 +1292,10 @@ PredictionValues PredictionGetDeviceValues ( const HmdRenderInfo &hmdRenderInfo,
     return result;
 }
 
-Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&eyeViewAdjust )
+Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset )
 {
-    Matrix4f worldFromPredictedView = (eyeViewAdjust * predictedViewFromWorld).InvertedHomogeneousTransform();
-    Matrix4f matRenderFromNowStart = (eyeViewAdjust * renderedViewFromWorld) * worldFromPredictedView;
+    Matrix4f worldFromPredictedView = (hmdToEyeViewOffset * predictedViewFromWorld).InvertedHomogeneousTransform();
+    Matrix4f matRenderFromNowStart = (hmdToEyeViewOffset * renderedViewFromWorld) * worldFromPredictedView;
 
     // The sensor-predicted orientations have: X=right, Y=up, Z=backwards.
     // The vectors inside the mesh are in NDC to keep the shader simple: X=right, Y=down, Z=forwards.
@@ -1311,22 +1326,35 @@ Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matri
     return matRenderFromNowStart;
 }
 
-Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&eyeViewAdjust )
+Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset )
 {
-    Matrix4f worldFromPredictedView = (eyeViewAdjust * predictedViewFromWorld).InvertedHomogeneousTransform();
-    Matrix4f matRenderXform = (eyeViewAdjust * renderedViewFromWorld) * worldFromPredictedView;
+    Matrix4f worldFromPredictedView = (hmdToEyeViewOffset * predictedViewFromWorld).InvertedHomogeneousTransform();
+    Matrix4f matRenderXform = (hmdToEyeViewOffset * renderedViewFromWorld) * worldFromPredictedView;
 
     return matRenderXform.Inverted();
 }
 
 
 TimewarpMachine::TimewarpMachine()
-{
+  : VsyncEnabled(false),
+    RenderInfo(),
+    CurrentPredictionValues(),
+    DistortionTimeCount(0),
+    DistortionTimeCurrentStart(0.0),
+    //DistortionTimes[],
+    DistortionTimeAverage(0.f),
+    //EyeRenderPoses[],
+    LastFramePresentFlushTime(0.0),
+    PresentFlushToPresentFlushSeconds(0.f),
+    NextFramePresentFlushTime(0.0)
+{
+    #if defined(OVR_BUILD_DEBUG)
+    memset(DistortionTimes, 0, sizeof(DistortionTimes));
+    #endif
+
     for ( int i = 0; i < 2; i++ )
     {
         EyeRenderPoses[i] = Posef();
     }
-    DistortionTimeCount = 0;
-    VsyncEnabled = false;
 }
 
@@ -1431,6 +1459,14 @@ double TimewarpMachine::JustInTime_GetDistortionWaitUntilTime()
     return LastFramePresentFlushTime + (float)( CurrentPredictionValues.PresentFlushToPresentFlush - howLongBeforePresent );
 }
 
+double TimewarpMachine::JustInTime_AverageDistortionTime()
+{
+    if ( JustInTime_NeedDistortionTimeMeasurement() )
+    {
+        return 0.0;
+    }
+    return DistortionTimeAverage;
+}
 
 bool TimewarpMachine::JustInTime_NeedDistortionTimeMeasurement() const
 {
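Note on the refactor above: the per-vertex work in DistortionMeshCreate is hoisted into the new DistortionMeshMakeVertex helper, including the TimewarpLerp term that records when the panel scans out each vertex, expressed as a 0..1 fraction of the frame. A minimal standalone sketch of that mapping, using a simplified stand-in enum rather than the real HmdShutterTypeEnum (names here are illustrative, not LibOVR declarations):

    #include <cassert>

    // Simplified stand-in for the LibOVR shutter enum; illustrative only.
    enum ShutterType
    {
        Shutter_Global,
        Shutter_RollingLeftToRight,
        Shutter_RollingRightToLeft,
        Shutter_RollingTopToBottom
    };

    // Scan-out time of a vertex as a 0..1 fraction of the frame, mirroring the
    // switch in DistortionMeshMakeVertex. screenNdcX/Y are in [-1, +1].
    float TimewarpLerpFor(ShutterType shutter, float screenNdcX, float screenNdcY, bool rightEye)
    {
        switch (shutter)
        {
        case Shutter_Global:
            return 0.0f;                                            // whole panel lights at once
        case Shutter_RollingLeftToRight:
            // Left eye covers 0.0..0.5 of the frame, right eye 0.5..1.0.
            return screenNdcX * 0.25f + 0.25f + (rightEye ? 0.5f : 0.0f);
        case Shutter_RollingRightToLeft:
            // Right eye covers 0.0..0.5, left eye 0.5..1.0.
            return 0.75f - screenNdcX * 0.25f - (rightEye ? 0.5f : 0.0f);
        case Shutter_RollingTopToBottom:
            // Both eyes scan together, top to bottom.
            return screenNdcY * 0.5f + 0.5f;
        }
        assert(false);
        return 0.0f;
    }

For a rolling left-to-right panel this puts the left eye's vertices in 0.0..0.5 and the right eye's in 0.5..1.0, which is exactly what the switch in the diff encodes.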
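The fade-to-black border also changed: the old code used a single fadeOutBorderFraction of 0.075 and clamped Shade into [0, 1], while DistortionMeshMakeVertex uses separate fractions for the texture's outer edges (0.3), its more-magnified inner edge (0.1) and the physical screen edge (0.1), and only clamps the top end so that coarse meshes can interpolate through zero. A minimal sketch of the new computation, with standard-library calls standing in for Alg::Min/Max/Abs; the helper name and parameters are illustrative:

    #include <algorithm>
    #include <cmath>

    // Mirrors the fade computation in DistortionMeshMakeVertex.
    // texNdcX/texNdcY: blue-channel render-target coordinate in NDC, with +X flipped
    // on the right eye so +1 is always the (more magnified) inner edge.
    // screenNdcX/screenNdcY: position on the physical panel in NDC.
    float VertexShade(float texNdcX, float texNdcY, float screenNdcX, float screenNdcY)
    {
        const float borderTexture      = 0.3f;   // outer/upper/lower texture edges
        const float borderTextureInner = 0.1f;   // inner texture edge, against the screen centre
        const float borderScreen       = 0.1f;   // physical screen edge

        float fade =          (1.0f / borderTextureInner) * (1.0f - texNdcX);   // inner
        fade = std::min(fade, (1.0f / borderTexture)      * (1.0f + texNdcX));  // outer
        fade = std::min(fade, (1.0f / borderTexture)      * (1.0f - texNdcY));  // upper
        fade = std::min(fade, (1.0f / borderTexture)      * (1.0f + texNdcY));  // lower

        float screenFade = (1.0f / borderScreen) *
                           (1.0f - std::max(std::fabs(screenNdcX), std::fabs(screenNdcY)));
        fade = std::min(fade, screenFade);

        // Deliberately not clamped at 0: coarse grids need negative values so the
        // interpolated zero crossing lands where the fade should actually end.
        return std::min(fade, 1.0f);
    }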
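Separately, the old ViewAdjust / eyeViewAdjust naming becomes HmdToEyeViewOffset throughout (StereoEyeParams and both Timewarp* helpers). A hypothetical call site, assuming the caller already has the eye's StereoEyeParams plus the rendered and newly predicted view matrices (all variable names here are illustrative):

    // 'eyeParams', 'renderedViewFromWorld' and 'predictedViewFromWorld' are assumed
    // to exist in the caller; only the member/parameter name changed in this diff.
    Matrix4f timewarpStartDelta = TimewarpComputePoseDelta ( renderedViewFromWorld,
                                                             predictedViewFromWorld,
                                                             eyeParams.HmdToEyeViewOffset );

The math itself is unchanged: the delta composes (offset * rendered) with the inverse of (offset * predicted), i.e. it re-expresses the eye pose the frame was rendered with in the newly predicted eye frame.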
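Finally, TimewarpMachine gains JustInTime_AverageDistortionTime(), which returns the running average of measured distortion-render times, or 0.0 while JustInTime_NeedDistortionTimeMeasurement() still wants more samples. A hedged usage sketch; the fallback value and surrounding scheduling are illustrative, not part of LibOVR:

    // 'timewarp' is a TimewarpMachine owned by the app's renderer (assumed to exist).
    double distortionBudget = timewarp.JustInTime_AverageDistortionTime();
    if (distortionBudget == 0.0)
    {
        distortionBudget = 0.002;   // no samples yet: assume 2 ms (app-chosen fallback)
    }
    // The app then waits until JustInTime_GetDistortionWaitUntilTime() before issuing
    // the distortion pass, so the measured average keeps the wait time honest.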