path: root/LibOVR/Src/Util
author     Brad Davis <[email protected]>    2013-07-03 09:16:03 -0700
committer  Brad Davis <[email protected]>    2013-07-03 09:16:03 -0700
commit     d46694c91c2bec4eb1e282c0c0101e6dab26e082 (patch)
tree       eb5fba71edf1aedc0d6af9406881004289433b20 /LibOVR/Src/Util
parent     7fa8be4bc565adc9911c95c814480cc48bf2d13c (diff)
SDK 0.2.3
Diffstat (limited to 'LibOVR/Src/Util')
-rw-r--r--   LibOVR/Src/Util/Util_LatencyTest.cpp     1131
-rw-r--r--   LibOVR/Src/Util/Util_LatencyTest.h         320
-rw-r--r--   LibOVR/Src/Util/Util_MagCalibration.cpp    360
-rw-r--r--   LibOVR/Src/Util/Util_MagCalibration.h      236
-rw-r--r--   LibOVR/Src/Util/Util_Render_Stereo.cpp     625
-rw-r--r--   LibOVR/Src/Util/Util_Render_Stereo.h       599
6 files changed, 1646 insertions, 1625 deletions
diff --git a/LibOVR/Src/Util/Util_LatencyTest.cpp b/LibOVR/Src/Util/Util_LatencyTest.cpp
index 1a1c303..57c2769 100644
--- a/LibOVR/Src/Util/Util_LatencyTest.cpp
+++ b/LibOVR/Src/Util/Util_LatencyTest.cpp
@@ -1,560 +1,571 @@
-/************************************************************************************
-
-Filename : Util_LatencyTest.cpp
-Content : Wraps the lower level LatencyTester interface and adds functionality.
-Created : February 14, 2013
-Authors : Lee Cooper
-
-Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
-
-Use of this software is subject to the terms of the Oculus license
-agreement provided at the time of installation or download, or which
-otherwise accompanies this software in either electronic or hard copy form.
-
-*************************************************************************************/
-
-#include "Util_LatencyTest.h"
-
-#include "../Kernel/OVR_Log.h"
-#include "../Kernel/OVR_Timer.h"
-
-namespace OVR { namespace Util {
-
-static const UInt32 TIME_TO_WAIT_FOR_SETTLE_PRE_CALIBRATION = 16*10;
-static const UInt32 TIME_TO_WAIT_FOR_SETTLE_POST_CALIBRATION = 16*10;
-static const UInt32 TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT = 16*5;
-static const UInt32 TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS = 16*5;
-static const UInt32 DEFAULT_NUMBER_OF_SAMPLES = 10; // For both color 1->2 and color 2->1 transitions.
-static const UInt32 INITIAL_SAMPLES_TO_IGNORE = 4;
-static const UInt32 TIMEOUT_WAITING_FOR_TEST_STARTED = 1000;
-static const UInt32 TIMEOUT_WAITING_FOR_COLOR_DETECTED = 4000;
-static const Color CALIBRATE_BLACK(0, 0, 0);
-static const Color CALIBRATE_WHITE(255, 255, 255);
-static const Color COLOR1(0, 0, 0);
-static const Color COLOR2(255, 255, 255);
-static const Color SENSOR_DETECT_THRESHOLD(128, 255, 255);
-static const float BIG_FLOAT = 1000000.0f;
-static const float SMALL_FLOAT = -1000000.0f;
-
-//-------------------------------------------------------------------------------------
-// ***** LatencyTest
-
-LatencyTest::LatencyTest(LatencyTestDevice* device)
- : Handler(getThis())
-{
- if (device != NULL)
- {
- SetDevice(device);
- }
-
- reset();
-
- srand(Timer::GetTicksMs());
-}
-
-LatencyTest::~LatencyTest()
-{
- clearMeasurementResults();
-}
-
-bool LatencyTest::SetDevice(LatencyTestDevice* device)
-{
-
- if (device != Device)
- {
- if (device != NULL)
- {
- if (device->GetMessageHandler() != NULL)
- {
- OVR_DEBUG_LOG(
- ("LatencyTest::AttachToDevice failed - device %p already has handler", device));
- return false;
- }
- }
-
- if (Device != NULL)
- {
- Device->SetMessageHandler(0);
- }
- Device = device;
-
- if (Device != NULL)
- {
- Device->SetMessageHandler(&Handler);
-
- // Set trigger threshold.
- LatencyTestConfiguration configuration(SENSOR_DETECT_THRESHOLD, false); // No samples streaming.
- Device->SetConfiguration(configuration, true);
- }
- }
-
- return true;
-}
-
-UInt32 LatencyTest::getRandomComponent(UInt32 range)
-{
- UInt32 val = rand() % range;
- return val;
-}
-
-void LatencyTest::BeginTest()
-{
- if (State == State_WaitingForButton)
- {
- // Set color to black and wait a while.
- RenderColor = CALIBRATE_BLACK;
-
- State = State_WaitingForSettlePreCalibrationColorBlack;
- OVR_DEBUG_LOG(("State_WaitingForButton -> State_WaitingForSettlePreCalibrationColorBlack."));
-
- setTimer(TIME_TO_WAIT_FOR_SETTLE_PRE_CALIBRATION);
- }
-}
-
-void LatencyTest::handleMessage(const Message& msg, LatencyTestMessageType latencyTestMessage)
-{
- // For debugging.
-/* if (msg.Type == Message_LatencyTestSamples)
- {
- MessageLatencyTestSamples* pSamples = (MessageLatencyTestSamples*) &msg;
-
- if (pSamples->Samples.GetSize() > 0)
- {
- // Just show the first one for now.
- Color c = pSamples->Samples[0];
- OVR_DEBUG_LOG(("%d %d %d", c.R, c.G, c.B));
- }
- return;
- }
-*/
-
- if (latencyTestMessage == LatencyTest_Timer)
- {
- if (!Device)
- {
- reset();
- return;
- }
-
- if (State == State_WaitingForSettlePreCalibrationColorBlack)
- {
- // Send calibrate message to device and wait a while.
- LatencyTestCalibrate calibrate(CALIBRATE_BLACK);
- Device->SetCalibrate(calibrate);
-
- State = State_WaitingForSettlePostCalibrationColorBlack;
- OVR_DEBUG_LOG(("State_WaitingForSettlePreCalibrationColorBlack -> State_WaitingForSettlePostCalibrationColorBlack."));
-
- setTimer(TIME_TO_WAIT_FOR_SETTLE_POST_CALIBRATION);
- }
- else if (State == State_WaitingForSettlePostCalibrationColorBlack)
- {
- // Change color to white and wait a while.
- RenderColor = CALIBRATE_WHITE;
-
- State = State_WaitingForSettlePreCalibrationColorWhite;
- OVR_DEBUG_LOG(("State_WaitingForSettlePostCalibrationColorBlack -> State_WaitingForSettlePreCalibrationColorWhite."));
-
- setTimer(TIME_TO_WAIT_FOR_SETTLE_PRE_CALIBRATION);
- }
- else if (State == State_WaitingForSettlePreCalibrationColorWhite)
- {
- // Send calibrate message to device and wait a while.
- LatencyTestCalibrate calibrate(CALIBRATE_WHITE);
- Device->SetCalibrate(calibrate);
-
- State = State_WaitingForSettlePostCalibrationColorWhite;
- OVR_DEBUG_LOG(("State_WaitingForSettlePreCalibrationColorWhite -> State_WaitingForSettlePostCalibrationColorWhite."));
-
- setTimer(TIME_TO_WAIT_FOR_SETTLE_POST_CALIBRATION);
- }
- else if (State == State_WaitingForSettlePostCalibrationColorWhite)
- {
- // Calibration is done. Switch to color 1 and wait for it to settle.
- RenderColor = COLOR1;
-
- State = State_WaitingForSettlePostMeasurement;
- OVR_DEBUG_LOG(("State_WaitingForSettlePostCalibrationColorWhite -> State_WaitingForSettlePostMeasurement."));
-
- UInt32 waitTime = TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT + getRandomComponent(TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS);
- setTimer(waitTime);
- }
- else if (State == State_WaitingForSettlePostMeasurement)
- {
- // Prepare for next measurement.
-
- // Create a new result object.
- MeasurementResult* pResult = new MeasurementResult();
- Results.PushBack(pResult);
-
- State = State_WaitingToTakeMeasurement;
- OVR_DEBUG_LOG(("State_WaitingForSettlePostMeasurement -> State_WaitingToTakeMeasurement."));
- }
- else if (State == State_WaitingForTestStarted)
- {
- // We timed out waiting for 'TestStarted'. Abandon this measurement and setup for the next.
- getActiveResult()->TimedOutWaitingForTestStarted = true;
-
- State = State_WaitingForSettlePostMeasurement;
- OVR_DEBUG_LOG(("** Timed out waiting for 'TestStarted'."));
- OVR_DEBUG_LOG(("State_WaitingForTestStarted -> State_WaitingForSettlePostMeasurement."));
-
- UInt32 waitTime = TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT + getRandomComponent(TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS);
- setTimer(waitTime);
- }
- else if (State == State_WaitingForColorDetected)
- {
- // We timed out waiting for 'ColorDetected'. Abandon this measurement and setup for the next.
- getActiveResult()->TimedOutWaitingForColorDetected = true;
-
- State = State_WaitingForSettlePostMeasurement;
- OVR_DEBUG_LOG(("** Timed out waiting for 'ColorDetected'."));
- OVR_DEBUG_LOG(("State_WaitingForColorDetected -> State_WaitingForSettlePostMeasurement."));
-
- UInt32 waitTime = TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT + getRandomComponent(TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS);
- setTimer(waitTime);
- }
- }
- else if (latencyTestMessage == LatencyTest_ProcessInputs)
- {
- if (State == State_WaitingToTakeMeasurement)
- {
- if (!Device)
- {
- reset();
- return;
- }
-
- // Send 'StartTest' feature report with opposite target color.
- if (RenderColor == COLOR1)
- {
- RenderColor = COLOR2;
- }
- else
- {
- RenderColor = COLOR1;
- }
-
- getActiveResult()->TargetColor = RenderColor;
-
- // Record time so we can determine usb roundtrip time.
- getActiveResult()->StartTestTicksMicroS = Timer::GetTicks();
-
- LatencyTestStartTest startTest(RenderColor);
- Device->SetStartTest(startTest);
-
- State = State_WaitingForTestStarted;
- OVR_DEBUG_LOG(("State_WaitingToTakeMeasurement -> State_WaitingForTestStarted."));
-
- setTimer(TIMEOUT_WAITING_FOR_TEST_STARTED);
- }
- }
- else if (msg.Type == Message_LatencyTestButton)
- {
- BeginTest();
- }
- else if (msg.Type == Message_LatencyTestStarted)
- {
- if (State == State_WaitingForTestStarted)
- {
- clearTimer();
-
- // Record time so we can determine usb roundtrip time.
- getActiveResult()->TestStartedTicksMicroS = Timer::GetTicks();
-
- State = State_WaitingForColorDetected;
- OVR_DEBUG_LOG(("State_WaitingForTestStarted -> State_WaitingForColorDetected."));
-
- setTimer(TIMEOUT_WAITING_FOR_COLOR_DETECTED);
- }
- }
- else if (msg.Type == Message_LatencyTestColorDetected)
- {
- if (State == State_WaitingForColorDetected)
- {
- // Record time to detect color.
- MessageLatencyTestColorDetected* pDetected = (MessageLatencyTestColorDetected*) &msg;
- UInt16 elapsedTime = pDetected->Elapsed;
- OVR_DEBUG_LOG(("Time to 'ColorDetected' = %d", elapsedTime));
-
- getActiveResult()->DeviceMeasuredElapsedMilliS = elapsedTime;
-
- if (areResultsComplete())
- {
- // We're done.
- processResults();
- reset();
- }
- else
- {
- // Run another measurement.
- State = State_WaitingForSettlePostMeasurement;
- OVR_DEBUG_LOG(("State_WaitingForColorDetected -> State_WaitingForSettlePostMeasurement."));
-
- UInt32 waitTime = TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT + getRandomComponent(TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS);
- setTimer(waitTime);
- }
- }
- }
- else if (msg.Type == Message_DeviceRemoved)
- {
- reset();
- }
-}
-
-LatencyTest::MeasurementResult* LatencyTest::getActiveResult()
-{
- OVR_ASSERT(!Results.IsEmpty());
- return Results.GetLast();
-}
-
-void LatencyTest::setTimer(UInt32 timeMilliS)
-{
- ActiveTimerMilliS = timeMilliS;
-}
-
-void LatencyTest::clearTimer()
-{
- ActiveTimerMilliS = 0;
-}
-
-void LatencyTest::reset()
-{
- clearMeasurementResults();
- State = State_WaitingForButton;
-
- HaveOldTime = false;
- ActiveTimerMilliS = 0;
-}
-
-void LatencyTest::clearMeasurementResults()
-{
- while(!Results.IsEmpty())
- {
- MeasurementResult* pElem = Results.GetFirst();
- pElem->RemoveNode();
- delete pElem;
- }
-}
-
-LatencyTest::LatencyTestHandler::~LatencyTestHandler()
-{
- RemoveHandlerFromDevices();
-}
-
-void LatencyTest::LatencyTestHandler::OnMessage(const Message& msg)
-{
- pLatencyTestUtil->handleMessage(msg);
-}
-
-void LatencyTest::ProcessInputs()
-{
- updateForTimeouts();
- handleMessage(Message(), LatencyTest_ProcessInputs);
-}
-
-bool LatencyTest::DisplayScreenColor(Color& colorToDisplay)
-{
- updateForTimeouts();
-
- if (State == State_WaitingForButton)
- {
- return false;
- }
-
- colorToDisplay = RenderColor;
- return true;
-}
-
-const char* LatencyTest::GetResultsString()
-{
- if (!ResultsString.IsEmpty() && ReturnedResultString != ResultsString.ToCStr())
- {
- ReturnedResultString = ResultsString;
- return ReturnedResultString.ToCStr();
- }
-
- return NULL;
-}
-
-bool LatencyTest::areResultsComplete()
-{
- UInt32 initialMeasurements = 0;
-
- UInt32 measurements1to2 = 0;
- UInt32 measurements2to1 = 0;
-
- MeasurementResult* pCurr = Results.GetFirst();
- while(true)
- {
- // Process.
- if (!pCurr->TimedOutWaitingForTestStarted &&
- !pCurr->TimedOutWaitingForColorDetected)
- {
- initialMeasurements++;
-
- if (initialMeasurements > INITIAL_SAMPLES_TO_IGNORE)
- {
- if (pCurr->TargetColor == COLOR2)
- {
- measurements1to2++;
- }
- else
- {
- measurements2to1++;
- }
- }
- }
-
- if (Results.IsLast(pCurr))
- {
- break;
- }
- pCurr = Results.GetNext(pCurr);
- }
-
- if (measurements1to2 >= DEFAULT_NUMBER_OF_SAMPLES &&
- measurements2to1 >= DEFAULT_NUMBER_OF_SAMPLES)
- {
- return true;
- }
-
- return false;
-}
-
-void LatencyTest::processResults()
-{
-
- UInt32 minTime1To2 = UINT_MAX;
- UInt32 maxTime1To2 = 0;
- float averageTime1To2 = 0.0f;
- UInt32 minTime2To1 = UINT_MAX;
- UInt32 maxTime2To1 = 0;
- float averageTime2To1 = 0.0f;
-
- float minUSBTripMilliS = BIG_FLOAT;
- float maxUSBTripMilliS = SMALL_FLOAT;
- float averageUSBTripMilliS = 0.0f;
- UInt32 countUSBTripTime = 0;
-
- UInt32 measurementsCount = 0;
- UInt32 measurements1to2 = 0;
- UInt32 measurements2to1 = 0;
-
- MeasurementResult* pCurr = Results.GetFirst();
- UInt32 count = 0;
- while(true)
- {
- count++;
-
- if (!pCurr->TimedOutWaitingForTestStarted &&
- !pCurr->TimedOutWaitingForColorDetected)
- {
- measurementsCount++;
-
- if (measurementsCount > INITIAL_SAMPLES_TO_IGNORE)
- {
- if (pCurr->TargetColor == COLOR2)
- {
- measurements1to2++;
-
- if (measurements1to2 <= DEFAULT_NUMBER_OF_SAMPLES)
- {
- UInt32 elapsed = pCurr->DeviceMeasuredElapsedMilliS;
-
- minTime1To2 = Alg::Min(elapsed, minTime1To2);
- maxTime1To2 = Alg::Max(elapsed, maxTime1To2);
-
- averageTime1To2 += (float) elapsed;
- }
- }
- else
- {
- measurements2to1++;
-
- if (measurements2to1 <= DEFAULT_NUMBER_OF_SAMPLES)
- {
- UInt32 elapsed = pCurr->DeviceMeasuredElapsedMilliS;
-
- minTime2To1 = Alg::Min(elapsed, minTime2To1);
- maxTime2To1 = Alg::Max(elapsed, maxTime2To1);
-
- averageTime2To1 += (float) elapsed;
- }
- }
-
- float usbRountripElapsedMilliS = 0.001f * (float) (pCurr->TestStartedTicksMicroS - pCurr->StartTestTicksMicroS);
- minUSBTripMilliS = Alg::Min(usbRountripElapsedMilliS, minUSBTripMilliS);
- maxUSBTripMilliS = Alg::Max(usbRountripElapsedMilliS, maxUSBTripMilliS);
- averageUSBTripMilliS += usbRountripElapsedMilliS;
- countUSBTripTime++;
- }
- }
-
- if (measurements1to2 >= DEFAULT_NUMBER_OF_SAMPLES &&
- measurements2to1 >= DEFAULT_NUMBER_OF_SAMPLES)
- {
- break;
- }
-
- if (Results.IsLast(pCurr))
- {
- break;
- }
- pCurr = Results.GetNext(pCurr);
- }
-
- averageTime1To2 /= (float) DEFAULT_NUMBER_OF_SAMPLES;
- averageTime2To1 /= (float) DEFAULT_NUMBER_OF_SAMPLES;
-
- averageUSBTripMilliS /= countUSBTripTime;
-
- float finalResult = 0.5f * (averageTime1To2 + averageTime2To1);
- finalResult += averageUSBTripMilliS;
-
- ResultsString.Clear();
- ResultsString.AppendFormat("RESULT=%.1f (add half Tracker period) [b->w %d|%.1f|%d] [w->b %d|%.1f|%d] [usb rndtrp %.1f|%.1f|%.1f] [cnt %d] [tmouts %d]",
- finalResult,
- minTime1To2, averageTime1To2, maxTime1To2,
- minTime2To1, averageTime2To1, maxTime2To1,
- minUSBTripMilliS, averageUSBTripMilliS, maxUSBTripMilliS,
- DEFAULT_NUMBER_OF_SAMPLES*2, count - measurementsCount);
-}
-
-void LatencyTest::updateForTimeouts()
-{
- if (!HaveOldTime)
- {
- HaveOldTime = true;
- OldTime = Timer::GetTicksMs();
- return;
- }
-
- UInt32 newTime = Timer::GetTicksMs();
- UInt32 elapsedMilliS = newTime - OldTime;
- if (newTime < OldTime)
- {
- elapsedMilliS = OldTime - newTime;
- elapsedMilliS = UINT_MAX - elapsedMilliS;
- }
- OldTime = newTime;
-
- elapsedMilliS = Alg::Min(elapsedMilliS, (UInt32) 100); // Clamp at 100mS in case we're not being called very often.
-
-
- if (ActiveTimerMilliS == 0)
- {
- return;
- }
-
- if (elapsedMilliS >= ActiveTimerMilliS)
- {
- ActiveTimerMilliS = 0;
- handleMessage(Message(), LatencyTest_Timer);
- return;
- }
-
- ActiveTimerMilliS -= elapsedMilliS;
-}
-
-}} // namespace OVR::Util
+/************************************************************************************
+
+Filename : Util_LatencyTest.cpp
+Content : Wraps the lower level LatencyTester interface and adds functionality.
+Created : February 14, 2013
+Authors : Lee Cooper
+
+Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
+
+Use of this software is subject to the terms of the Oculus license
+agreement provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+*************************************************************************************/
+
+#include "Util_LatencyTest.h"
+
+#include "../Kernel/OVR_Log.h"
+#include "../Kernel/OVR_Timer.h"
+
+namespace OVR { namespace Util {
+
+static const UInt32 TIME_TO_WAIT_FOR_SETTLE_PRE_CALIBRATION = 16*10;
+static const UInt32 TIME_TO_WAIT_FOR_SETTLE_POST_CALIBRATION = 16*10;
+static const UInt32 TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT = 16*5;
+static const UInt32 TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS = 16*5;
+static const UInt32 DEFAULT_NUMBER_OF_SAMPLES = 10; // For both color 1->2 and color 2->1 transitions.
+static const UInt32 INITIAL_SAMPLES_TO_IGNORE = 4;
+static const UInt32 TIMEOUT_WAITING_FOR_TEST_STARTED = 1000;
+static const UInt32 TIMEOUT_WAITING_FOR_COLOR_DETECTED = 4000;
+static const Color CALIBRATE_BLACK(0, 0, 0);
+static const Color CALIBRATE_WHITE(255, 255, 255);
+static const Color COLOR1(0, 0, 0);
+static const Color COLOR2(255, 255, 255);
+static const Color SENSOR_DETECT_THRESHOLD(128, 255, 255);
+static const float BIG_FLOAT = 1000000.0f;
+static const float SMALL_FLOAT = -1000000.0f;
+
+//-------------------------------------------------------------------------------------
+// ***** LatencyTest
+
+LatencyTest::LatencyTest(LatencyTestDevice* device)
+ : Handler(getThis())
+{
+ if (device != NULL)
+ {
+ SetDevice(device);
+ }
+
+ reset();
+
+ srand(Timer::GetTicksMs());
+}
+
+LatencyTest::~LatencyTest()
+{
+ clearMeasurementResults();
+}
+
+bool LatencyTest::SetDevice(LatencyTestDevice* device)
+{
+
+ if (device != Device)
+ {
+ if (device != NULL)
+ {
+ if (device->GetMessageHandler() != NULL)
+ {
+ OVR_DEBUG_LOG(
+ ("LatencyTest::AttachToDevice failed - device %p already has handler", device));
+ return false;
+ }
+ }
+
+ if (Device != NULL)
+ {
+ Device->SetMessageHandler(0);
+ }
+ Device = device;
+
+ if (Device != NULL)
+ {
+ Device->SetMessageHandler(&Handler);
+
+ // Set trigger threshold.
+ LatencyTestConfiguration configuration(SENSOR_DETECT_THRESHOLD, false); // No samples streaming.
+ Device->SetConfiguration(configuration, true);
+
+            // Set display to initial (3 dashes).
+ LatencyTestDisplay ltd(2, 0x40400040);
+ Device->SetDisplay(ltd);
+ }
+ }
+
+ return true;
+}
+
+UInt32 LatencyTest::getRandomComponent(UInt32 range)
+{
+ UInt32 val = rand() % range;
+ return val;
+}
+
+void LatencyTest::BeginTest()
+{
+ if (State == State_WaitingForButton)
+ {
+ // Set color to black and wait a while.
+ RenderColor = CALIBRATE_BLACK;
+
+ State = State_WaitingForSettlePreCalibrationColorBlack;
+ OVR_DEBUG_LOG(("State_WaitingForButton -> State_WaitingForSettlePreCalibrationColorBlack."));
+
+ setTimer(TIME_TO_WAIT_FOR_SETTLE_PRE_CALIBRATION);
+ }
+}
+
+void LatencyTest::handleMessage(const Message& msg, LatencyTestMessageType latencyTestMessage)
+{
+ // For debugging.
+/* if (msg.Type == Message_LatencyTestSamples)
+ {
+ MessageLatencyTestSamples* pSamples = (MessageLatencyTestSamples*) &msg;
+
+ if (pSamples->Samples.GetSize() > 0)
+ {
+ // Just show the first one for now.
+ Color c = pSamples->Samples[0];
+ OVR_DEBUG_LOG(("%d %d %d", c.R, c.G, c.B));
+ }
+ return;
+ }
+*/
+
+ if (latencyTestMessage == LatencyTest_Timer)
+ {
+ if (!Device)
+ {
+ reset();
+ return;
+ }
+
+ if (State == State_WaitingForSettlePreCalibrationColorBlack)
+ {
+ // Send calibrate message to device and wait a while.
+ Device->SetCalibrate(CALIBRATE_BLACK);
+
+ State = State_WaitingForSettlePostCalibrationColorBlack;
+ OVR_DEBUG_LOG(("State_WaitingForSettlePreCalibrationColorBlack -> State_WaitingForSettlePostCalibrationColorBlack."));
+
+ setTimer(TIME_TO_WAIT_FOR_SETTLE_POST_CALIBRATION);
+ }
+ else if (State == State_WaitingForSettlePostCalibrationColorBlack)
+ {
+ // Change color to white and wait a while.
+ RenderColor = CALIBRATE_WHITE;
+
+ State = State_WaitingForSettlePreCalibrationColorWhite;
+ OVR_DEBUG_LOG(("State_WaitingForSettlePostCalibrationColorBlack -> State_WaitingForSettlePreCalibrationColorWhite."));
+
+ setTimer(TIME_TO_WAIT_FOR_SETTLE_PRE_CALIBRATION);
+ }
+ else if (State == State_WaitingForSettlePreCalibrationColorWhite)
+ {
+ // Send calibrate message to device and wait a while.
+ Device->SetCalibrate(CALIBRATE_WHITE);
+
+ State = State_WaitingForSettlePostCalibrationColorWhite;
+ OVR_DEBUG_LOG(("State_WaitingForSettlePreCalibrationColorWhite -> State_WaitingForSettlePostCalibrationColorWhite."));
+
+ setTimer(TIME_TO_WAIT_FOR_SETTLE_POST_CALIBRATION);
+ }
+ else if (State == State_WaitingForSettlePostCalibrationColorWhite)
+ {
+ // Calibration is done. Switch to color 1 and wait for it to settle.
+ RenderColor = COLOR1;
+
+ State = State_WaitingForSettlePostMeasurement;
+ OVR_DEBUG_LOG(("State_WaitingForSettlePostCalibrationColorWhite -> State_WaitingForSettlePostMeasurement."));
+
+ UInt32 waitTime = TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT + getRandomComponent(TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS);
+ setTimer(waitTime);
+ }
+ else if (State == State_WaitingForSettlePostMeasurement)
+ {
+ // Prepare for next measurement.
+
+ // Create a new result object.
+ MeasurementResult* pResult = new MeasurementResult();
+ Results.PushBack(pResult);
+
+ State = State_WaitingToTakeMeasurement;
+ OVR_DEBUG_LOG(("State_WaitingForSettlePostMeasurement -> State_WaitingToTakeMeasurement."));
+ }
+ else if (State == State_WaitingForTestStarted)
+ {
+ // We timed out waiting for 'TestStarted'. Abandon this measurement and setup for the next.
+ getActiveResult()->TimedOutWaitingForTestStarted = true;
+
+ State = State_WaitingForSettlePostMeasurement;
+ OVR_DEBUG_LOG(("** Timed out waiting for 'TestStarted'."));
+ OVR_DEBUG_LOG(("State_WaitingForTestStarted -> State_WaitingForSettlePostMeasurement."));
+
+ UInt32 waitTime = TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT + getRandomComponent(TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS);
+ setTimer(waitTime);
+ }
+ else if (State == State_WaitingForColorDetected)
+ {
+ // We timed out waiting for 'ColorDetected'. Abandon this measurement and setup for the next.
+ getActiveResult()->TimedOutWaitingForColorDetected = true;
+
+ State = State_WaitingForSettlePostMeasurement;
+ OVR_DEBUG_LOG(("** Timed out waiting for 'ColorDetected'."));
+ OVR_DEBUG_LOG(("State_WaitingForColorDetected -> State_WaitingForSettlePostMeasurement."));
+
+ UInt32 waitTime = TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT + getRandomComponent(TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS);
+ setTimer(waitTime);
+ }
+ }
+ else if (latencyTestMessage == LatencyTest_ProcessInputs)
+ {
+ if (State == State_WaitingToTakeMeasurement)
+ {
+ if (!Device)
+ {
+ reset();
+ return;
+ }
+
+ // Send 'StartTest' feature report with opposite target color.
+ if (RenderColor == COLOR1)
+ {
+ RenderColor = COLOR2;
+ }
+ else
+ {
+ RenderColor = COLOR1;
+ }
+
+ getActiveResult()->TargetColor = RenderColor;
+
+ // Record time so we can determine usb roundtrip time.
+ getActiveResult()->StartTestTicksMicroS = Timer::GetTicks();
+
+ Device->SetStartTest(RenderColor);
+
+ State = State_WaitingForTestStarted;
+ OVR_DEBUG_LOG(("State_WaitingToTakeMeasurement -> State_WaitingForTestStarted."));
+
+ setTimer(TIMEOUT_WAITING_FOR_TEST_STARTED);
+
+ LatencyTestDisplay ltd(2, 0x40090040);
+ Device->SetDisplay(ltd);
+ }
+ }
+ else if (msg.Type == Message_LatencyTestButton)
+ {
+ BeginTest();
+ }
+ else if (msg.Type == Message_LatencyTestStarted)
+ {
+ if (State == State_WaitingForTestStarted)
+ {
+ clearTimer();
+
+ // Record time so we can determine usb roundtrip time.
+ getActiveResult()->TestStartedTicksMicroS = Timer::GetTicks();
+
+ State = State_WaitingForColorDetected;
+ OVR_DEBUG_LOG(("State_WaitingForTestStarted -> State_WaitingForColorDetected."));
+
+ setTimer(TIMEOUT_WAITING_FOR_COLOR_DETECTED);
+ }
+ }
+ else if (msg.Type == Message_LatencyTestColorDetected)
+ {
+ if (State == State_WaitingForColorDetected)
+ {
+ // Record time to detect color.
+ MessageLatencyTestColorDetected* pDetected = (MessageLatencyTestColorDetected*) &msg;
+ UInt16 elapsedTime = pDetected->Elapsed;
+ OVR_DEBUG_LOG(("Time to 'ColorDetected' = %d", elapsedTime));
+
+ getActiveResult()->DeviceMeasuredElapsedMilliS = elapsedTime;
+
+ if (areResultsComplete())
+ {
+ // We're done.
+ processResults();
+ reset();
+ }
+ else
+ {
+ // Run another measurement.
+ State = State_WaitingForSettlePostMeasurement;
+ OVR_DEBUG_LOG(("State_WaitingForColorDetected -> State_WaitingForSettlePostMeasurement."));
+
+ UInt32 waitTime = TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT + getRandomComponent(TIME_TO_WAIT_FOR_SETTLE_POST_MEASUREMENT_RANDOMNESS);
+ setTimer(waitTime);
+
+ LatencyTestDisplay ltd(2, 0x40400040);
+ Device->SetDisplay(ltd);
+ }
+ }
+ }
+ else if (msg.Type == Message_DeviceRemoved)
+ {
+ reset();
+ }
+}
+
+LatencyTest::MeasurementResult* LatencyTest::getActiveResult()
+{
+ OVR_ASSERT(!Results.IsEmpty());
+ return Results.GetLast();
+}
+
+void LatencyTest::setTimer(UInt32 timeMilliS)
+{
+ ActiveTimerMilliS = timeMilliS;
+}
+
+void LatencyTest::clearTimer()
+{
+ ActiveTimerMilliS = 0;
+}
+
+void LatencyTest::reset()
+{
+ clearMeasurementResults();
+ State = State_WaitingForButton;
+
+ HaveOldTime = false;
+ ActiveTimerMilliS = 0;
+}
+
+void LatencyTest::clearMeasurementResults()
+{
+ while(!Results.IsEmpty())
+ {
+ MeasurementResult* pElem = Results.GetFirst();
+ pElem->RemoveNode();
+ delete pElem;
+ }
+}
+
+LatencyTest::LatencyTestHandler::~LatencyTestHandler()
+{
+ RemoveHandlerFromDevices();
+}
+
+void LatencyTest::LatencyTestHandler::OnMessage(const Message& msg)
+{
+ pLatencyTestUtil->handleMessage(msg);
+}
+
+void LatencyTest::ProcessInputs()
+{
+ updateForTimeouts();
+ handleMessage(Message(), LatencyTest_ProcessInputs);
+}
+
+bool LatencyTest::DisplayScreenColor(Color& colorToDisplay)
+{
+ updateForTimeouts();
+
+ if (State == State_WaitingForButton)
+ {
+ return false;
+ }
+
+ colorToDisplay = RenderColor;
+ return true;
+}
+
+const char* LatencyTest::GetResultsString()
+{
+ if (!ResultsString.IsEmpty() && ReturnedResultString != ResultsString.ToCStr())
+ {
+ ReturnedResultString = ResultsString;
+ return ReturnedResultString.ToCStr();
+ }
+
+ return NULL;
+}
+
+bool LatencyTest::areResultsComplete()
+{
+ UInt32 initialMeasurements = 0;
+
+ UInt32 measurements1to2 = 0;
+ UInt32 measurements2to1 = 0;
+
+ MeasurementResult* pCurr = Results.GetFirst();
+ while(true)
+ {
+ // Process.
+ if (!pCurr->TimedOutWaitingForTestStarted &&
+ !pCurr->TimedOutWaitingForColorDetected)
+ {
+ initialMeasurements++;
+
+ if (initialMeasurements > INITIAL_SAMPLES_TO_IGNORE)
+ {
+ if (pCurr->TargetColor == COLOR2)
+ {
+ measurements1to2++;
+ }
+ else
+ {
+ measurements2to1++;
+ }
+ }
+ }
+
+ if (Results.IsLast(pCurr))
+ {
+ break;
+ }
+ pCurr = Results.GetNext(pCurr);
+ }
+
+ if (measurements1to2 >= DEFAULT_NUMBER_OF_SAMPLES &&
+ measurements2to1 >= DEFAULT_NUMBER_OF_SAMPLES)
+ {
+ return true;
+ }
+
+ return false;
+}
+
+void LatencyTest::processResults()
+{
+
+ UInt32 minTime1To2 = UINT_MAX;
+ UInt32 maxTime1To2 = 0;
+ float averageTime1To2 = 0.0f;
+ UInt32 minTime2To1 = UINT_MAX;
+ UInt32 maxTime2To1 = 0;
+ float averageTime2To1 = 0.0f;
+
+ float minUSBTripMilliS = BIG_FLOAT;
+ float maxUSBTripMilliS = SMALL_FLOAT;
+ float averageUSBTripMilliS = 0.0f;
+ UInt32 countUSBTripTime = 0;
+
+ UInt32 measurementsCount = 0;
+ UInt32 measurements1to2 = 0;
+ UInt32 measurements2to1 = 0;
+
+ MeasurementResult* pCurr = Results.GetFirst();
+ UInt32 count = 0;
+ while(true)
+ {
+ count++;
+
+ if (!pCurr->TimedOutWaitingForTestStarted &&
+ !pCurr->TimedOutWaitingForColorDetected)
+ {
+ measurementsCount++;
+
+ if (measurementsCount > INITIAL_SAMPLES_TO_IGNORE)
+ {
+ if (pCurr->TargetColor == COLOR2)
+ {
+ measurements1to2++;
+
+ if (measurements1to2 <= DEFAULT_NUMBER_OF_SAMPLES)
+ {
+ UInt32 elapsed = pCurr->DeviceMeasuredElapsedMilliS;
+
+ minTime1To2 = Alg::Min(elapsed, minTime1To2);
+ maxTime1To2 = Alg::Max(elapsed, maxTime1To2);
+
+ averageTime1To2 += (float) elapsed;
+ }
+ }
+ else
+ {
+ measurements2to1++;
+
+ if (measurements2to1 <= DEFAULT_NUMBER_OF_SAMPLES)
+ {
+ UInt32 elapsed = pCurr->DeviceMeasuredElapsedMilliS;
+
+ minTime2To1 = Alg::Min(elapsed, minTime2To1);
+ maxTime2To1 = Alg::Max(elapsed, maxTime2To1);
+
+ averageTime2To1 += (float) elapsed;
+ }
+ }
+
+ float usbRountripElapsedMilliS = 0.001f * (float) (pCurr->TestStartedTicksMicroS - pCurr->StartTestTicksMicroS);
+ minUSBTripMilliS = Alg::Min(usbRountripElapsedMilliS, minUSBTripMilliS);
+ maxUSBTripMilliS = Alg::Max(usbRountripElapsedMilliS, maxUSBTripMilliS);
+ averageUSBTripMilliS += usbRountripElapsedMilliS;
+ countUSBTripTime++;
+ }
+ }
+
+ if (measurements1to2 >= DEFAULT_NUMBER_OF_SAMPLES &&
+ measurements2to1 >= DEFAULT_NUMBER_OF_SAMPLES)
+ {
+ break;
+ }
+
+ if (Results.IsLast(pCurr))
+ {
+ break;
+ }
+ pCurr = Results.GetNext(pCurr);
+ }
+
+ averageTime1To2 /= (float) DEFAULT_NUMBER_OF_SAMPLES;
+ averageTime2To1 /= (float) DEFAULT_NUMBER_OF_SAMPLES;
+
+ averageUSBTripMilliS /= countUSBTripTime;
+
+ float finalResult = 0.5f * (averageTime1To2 + averageTime2To1);
+ finalResult += averageUSBTripMilliS;
+
+ ResultsString.Clear();
+ ResultsString.AppendFormat("RESULT=%.1f (add half Tracker period) [b->w %d|%.1f|%d] [w->b %d|%.1f|%d] [usb rndtrp %.1f|%.1f|%.1f] [cnt %d] [tmouts %d]",
+ finalResult,
+ minTime1To2, averageTime1To2, maxTime1To2,
+ minTime2To1, averageTime2To1, maxTime2To1,
+ minUSBTripMilliS, averageUSBTripMilliS, maxUSBTripMilliS,
+ DEFAULT_NUMBER_OF_SAMPLES*2, count - measurementsCount);
+
+ // Display result on latency tester display.
+ LatencyTestDisplay ltd(1, (int)finalResult);
+ Device->SetDisplay(ltd);
+}
+
+void LatencyTest::updateForTimeouts()
+{
+ if (!HaveOldTime)
+ {
+ HaveOldTime = true;
+ OldTime = Timer::GetTicksMs();
+ return;
+ }
+
+ UInt32 newTime = Timer::GetTicksMs();
+ UInt32 elapsedMilliS = newTime - OldTime;
+ if (newTime < OldTime)
+ {
+ elapsedMilliS = OldTime - newTime;
+ elapsedMilliS = UINT_MAX - elapsedMilliS;
+ }
+ OldTime = newTime;
+
+ elapsedMilliS = Alg::Min(elapsedMilliS, (UInt32) 100); // Clamp at 100mS in case we're not being called very often.
+
+
+ if (ActiveTimerMilliS == 0)
+ {
+ return;
+ }
+
+ if (elapsedMilliS >= ActiveTimerMilliS)
+ {
+ ActiveTimerMilliS = 0;
+ handleMessage(Message(), LatencyTest_Timer);
+ return;
+ }
+
+ ActiveTimerMilliS -= elapsedMilliS;
+}
+
+}} // namespace OVR::Util
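
A note on the timeout bookkeeping in updateForTimeouts() above: elapsed time is measured with the 32-bit millisecond counter from Timer::GetTicksMs(), the wrap-around case (newTime < OldTime) is handled explicitly, and the result is clamped to 100 ms so an infrequently called update cannot expire a timer in one huge jump. The following is a minimal standalone sketch of the same arithmetic, not LibOVR code; the function name elapsedSinceMs is illustrative. It relies on the fact that unsigned 32-bit subtraction wraps modulo 2^32, which yields the correct elapsed value across a counter wrap.

    // Sketch only: elapsed time for a wrapping 32-bit millisecond counter,
    // with the same 100 ms clamp that updateForTimeouts() applies.
    #include <cstdint>
    #include <algorithm>

    uint32_t elapsedSinceMs(uint32_t lastTicksMs, uint32_t nowTicksMs)
    {
        // Unsigned subtraction wraps modulo 2^32, so this stays correct even
        // when nowTicksMs has wrapped past UINT32_MAX and is numerically
        // smaller than lastTicksMs.
        uint32_t elapsed = nowTicksMs - lastTicksMs;

        // Clamp, as the code above does, so a long gap between calls cannot
        // expire several timers' worth of time at once.
        return std::min<uint32_t>(elapsed, 100u);
    }
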
diff --git a/LibOVR/Src/Util/Util_LatencyTest.h b/LibOVR/Src/Util/Util_LatencyTest.h
index 47f98f7..a01864e 100644
--- a/LibOVR/Src/Util/Util_LatencyTest.h
+++ b/LibOVR/Src/Util/Util_LatencyTest.h
@@ -1,160 +1,160 @@
-/************************************************************************************
-
-PublicHeader: OVR.h
-Filename : Util_LatencyTest.h
-Content : Wraps the lower level LatencyTesterDevice and adds functionality.
-Created : February 14, 2013
-Authors : Lee Cooper
-
-Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
-
-Use of this software is subject to the terms of the Oculus license
-agreement provided at the time of installation or download, or which
-otherwise accompanies this software in either electronic or hard copy form.
-
-*************************************************************************************/
-
-#ifndef OVR_Util_LatencyTest_h
-#define OVR_Util_LatencyTest_h
-
-#include "../OVR_Device.h"
-
-#include "../Kernel/OVR_String.h"
-#include "../Kernel/OVR_List.h"
-
-namespace OVR { namespace Util {
-
-
-//-------------------------------------------------------------------------------------
-// ***** LatencyTest
-//
-// LatencyTest utility class wraps the low level LatencyTestDevice and manages the scheduling
-// of a latency test. A single test is composed of a series of individual latency measurements
-// which are used to derive min, max, and an average latency value.
-//
-// Developers are required to call the following methods:
-// SetDevice - Sets the LatencyTestDevice to be used for the tests.
-// ProcessInputs - This should be called at the same place in the code where the game engine
-// reads the headset orientation from LibOVR (typically done by calling
-// 'GetOrientation' on the SensorFusion object). Calling this at the right time
-// enables us to measure the same latency that occurs for headset orientation
-// changes.
-// DisplayScreenColor - The latency tester works by sensing the color of the pixels directly
-// beneath it. The color of these pixels can be set by drawing a small
-// quad at the end of the rendering stage. The quad should be small
-// such that it doesn't significantly impact the rendering of the scene,
-// but large enough to be 'seen' by the sensor. See the SDK
-// documentation for more information.
-// GetResultsString - Call this to get a string containing the most recent results.
-// If the string has already been gotten then NULL will be returned.
-// The string pointer will remain valid until the next time this
-// method is called.
-//
-
-class LatencyTest : public NewOverrideBase
-{
-public:
- LatencyTest(LatencyTestDevice* device = NULL);
- ~LatencyTest();
-
- // Set the Latency Tester device that we'll use to send commands to and receive
- // notification messages from.
- bool SetDevice(LatencyTestDevice* device);
-
- // Returns true if this LatencyTestUtil has a Latency Tester device.
- bool HasDevice() const
- { return Handler.IsHandlerInstalled(); }
-
- void ProcessInputs();
- bool DisplayScreenColor(Color& colorToDisplay);
- const char* GetResultsString();
-
- // Begin test. Equivalent to pressing the button on the latency tester.
- void BeginTest();
-
-private:
- LatencyTest* getThis() { return this; }
-
- enum LatencyTestMessageType
- {
- LatencyTest_None,
- LatencyTest_Timer,
- LatencyTest_ProcessInputs,
- };
-
- UInt32 getRandomComponent(UInt32 range);
- void handleMessage(const Message& msg, LatencyTestMessageType latencyTestMessage = LatencyTest_None);
- void reset();
- void setTimer(UInt32 timeMilliS);
- void clearTimer();
-
- class LatencyTestHandler : public MessageHandler
- {
- LatencyTest* pLatencyTestUtil;
- public:
- LatencyTestHandler(LatencyTest* latencyTester) : pLatencyTestUtil(latencyTester) { }
- ~LatencyTestHandler();
-
- virtual void OnMessage(const Message& msg);
- };
-
- bool areResultsComplete();
- void processResults();
- void updateForTimeouts();
-
- Ptr<LatencyTestDevice> Device;
- LatencyTestHandler Handler;
-
- enum TesterState
- {
- State_WaitingForButton,
- State_WaitingForSettlePreCalibrationColorBlack,
- State_WaitingForSettlePostCalibrationColorBlack,
- State_WaitingForSettlePreCalibrationColorWhite,
- State_WaitingForSettlePostCalibrationColorWhite,
- State_WaitingToTakeMeasurement,
- State_WaitingForTestStarted,
- State_WaitingForColorDetected,
- State_WaitingForSettlePostMeasurement
- };
- TesterState State;
-
- bool HaveOldTime;
- UInt32 OldTime;
- UInt32 ActiveTimerMilliS;
-
- Color RenderColor;
-
- struct MeasurementResult : public ListNode<MeasurementResult>, public NewOverrideBase
- {
- MeasurementResult()
- : DeviceMeasuredElapsedMilliS(0),
- TimedOutWaitingForTestStarted(false),
- TimedOutWaitingForColorDetected(false),
- StartTestTicksMicroS(0),
- TestStartedTicksMicroS(0)
- {}
-
- Color TargetColor;
-
- UInt32 DeviceMeasuredElapsedMilliS;
-
- bool TimedOutWaitingForTestStarted;
- bool TimedOutWaitingForColorDetected;
-
- UInt64 StartTestTicksMicroS;
- UInt64 TestStartedTicksMicroS;
- };
-
- List<MeasurementResult> Results;
- void clearMeasurementResults();
-
- MeasurementResult* getActiveResult();
-
- StringBuffer ResultsString;
- String ReturnedResultString;
-};
-
-}} // namespace OVR::Util
-
-#endif // OVR_Util_LatencyTest_h
+/************************************************************************************
+
+PublicHeader: OVR.h
+Filename : Util_LatencyTest.h
+Content : Wraps the lower level LatencyTesterDevice and adds functionality.
+Created : February 14, 2013
+Authors : Lee Cooper
+
+Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
+
+Use of this software is subject to the terms of the Oculus license
+agreement provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+*************************************************************************************/
+
+#ifndef OVR_Util_LatencyTest_h
+#define OVR_Util_LatencyTest_h
+
+#include "../OVR_Device.h"
+
+#include "../Kernel/OVR_String.h"
+#include "../Kernel/OVR_List.h"
+
+namespace OVR { namespace Util {
+
+
+//-------------------------------------------------------------------------------------
+// ***** LatencyTest
+//
+// LatencyTest utility class wraps the low level LatencyTestDevice and manages the scheduling
+// of a latency test. A single test is composed of a series of individual latency measurements
+// which are used to derive min, max, and an average latency value.
+//
+// Developers are required to call the following methods:
+// SetDevice - Sets the LatencyTestDevice to be used for the tests.
+// ProcessInputs - This should be called at the same place in the code where the game engine
+// reads the headset orientation from LibOVR (typically done by calling
+// 'GetOrientation' on the SensorFusion object). Calling this at the right time
+// enables us to measure the same latency that occurs for headset orientation
+// changes.
+// DisplayScreenColor - The latency tester works by sensing the color of the pixels directly
+// beneath it. The color of these pixels can be set by drawing a small
+// quad at the end of the rendering stage. The quad should be small
+// such that it doesn't significantly impact the rendering of the scene,
+// but large enough to be 'seen' by the sensor. See the SDK
+// documentation for more information.
+// GetResultsString - Call this to get a string containing the most recent results.
+// If the string has already been gotten then NULL will be returned.
+// The string pointer will remain valid until the next time this
+// method is called.
+//
+
+class LatencyTest : public NewOverrideBase
+{
+public:
+ LatencyTest(LatencyTestDevice* device = NULL);
+ ~LatencyTest();
+
+ // Set the Latency Tester device that we'll use to send commands to and receive
+ // notification messages from.
+ bool SetDevice(LatencyTestDevice* device);
+
+ // Returns true if this LatencyTestUtil has a Latency Tester device.
+ bool HasDevice() const
+ { return Handler.IsHandlerInstalled(); }
+
+ void ProcessInputs();
+ bool DisplayScreenColor(Color& colorToDisplay);
+ const char* GetResultsString();
+
+ // Begin test. Equivalent to pressing the button on the latency tester.
+ void BeginTest();
+
+private:
+ LatencyTest* getThis() { return this; }
+
+ enum LatencyTestMessageType
+ {
+ LatencyTest_None,
+ LatencyTest_Timer,
+ LatencyTest_ProcessInputs,
+ };
+
+ UInt32 getRandomComponent(UInt32 range);
+ void handleMessage(const Message& msg, LatencyTestMessageType latencyTestMessage = LatencyTest_None);
+ void reset();
+ void setTimer(UInt32 timeMilliS);
+ void clearTimer();
+
+ class LatencyTestHandler : public MessageHandler
+ {
+ LatencyTest* pLatencyTestUtil;
+ public:
+ LatencyTestHandler(LatencyTest* latencyTester) : pLatencyTestUtil(latencyTester) { }
+ ~LatencyTestHandler();
+
+ virtual void OnMessage(const Message& msg);
+ };
+
+ bool areResultsComplete();
+ void processResults();
+ void updateForTimeouts();
+
+ Ptr<LatencyTestDevice> Device;
+ LatencyTestHandler Handler;
+
+ enum TesterState
+ {
+ State_WaitingForButton,
+ State_WaitingForSettlePreCalibrationColorBlack,
+ State_WaitingForSettlePostCalibrationColorBlack,
+ State_WaitingForSettlePreCalibrationColorWhite,
+ State_WaitingForSettlePostCalibrationColorWhite,
+ State_WaitingToTakeMeasurement,
+ State_WaitingForTestStarted,
+ State_WaitingForColorDetected,
+ State_WaitingForSettlePostMeasurement
+ };
+ TesterState State;
+
+ bool HaveOldTime;
+ UInt32 OldTime;
+ UInt32 ActiveTimerMilliS;
+
+ Color RenderColor;
+
+ struct MeasurementResult : public ListNode<MeasurementResult>, public NewOverrideBase
+ {
+ MeasurementResult()
+ : DeviceMeasuredElapsedMilliS(0),
+ TimedOutWaitingForTestStarted(false),
+ TimedOutWaitingForColorDetected(false),
+ StartTestTicksMicroS(0),
+ TestStartedTicksMicroS(0)
+ {}
+
+ Color TargetColor;
+
+ UInt32 DeviceMeasuredElapsedMilliS;
+
+ bool TimedOutWaitingForTestStarted;
+ bool TimedOutWaitingForColorDetected;
+
+ UInt64 StartTestTicksMicroS;
+ UInt64 TestStartedTicksMicroS;
+ };
+
+ List<MeasurementResult> Results;
+ void clearMeasurementResults();
+
+ MeasurementResult* getActiveResult();
+
+ StringBuffer ResultsString;
+ String ReturnedResultString;
+};
+
+}} // namespace OVR::Util
+
+#endif // OVR_Util_LatencyTest_h
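
The comment block at the top of this header spells out the integration points (SetDevice, ProcessInputs, DisplayScreenColor, GetResultsString, plus BeginTest for starting a run without the hardware button). A minimal per-frame usage sketch follows; it is not LibOVR sample code, and pLatencyDevice, DrawLatencyQuad, and Log are placeholders the host application would supply.

    // Sketch only: wiring the documented LatencyTest calls into an app loop.
    OVR::Util::LatencyTest LatencyUtil;

    void AppInit(OVR::LatencyTestDevice* pLatencyDevice)
    {
        LatencyUtil.SetDevice(pLatencyDevice);
    }

    void AppFrame()
    {
        // Call where the headset orientation is read, so the measured latency
        // matches the latency seen by orientation changes.
        LatencyUtil.ProcessInputs();

        // ... render the scene ...

        // Draw the small sensor quad last, whenever the tester asks for a color.
        OVR::Color squareColor;
        if (LatencyUtil.DisplayScreenColor(squareColor))
            DrawLatencyQuad(squareColor);

        // Non-NULL once per completed test; the pointer stays valid until the
        // next call, per the GetResultsString comment above.
        if (const char* results = LatencyUtil.GetResultsString())
            Log(results);
    }
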
diff --git a/LibOVR/Src/Util/Util_MagCalibration.cpp b/LibOVR/Src/Util/Util_MagCalibration.cpp
index f3e72f5..c537154 100644
--- a/LibOVR/Src/Util/Util_MagCalibration.cpp
+++ b/LibOVR/Src/Util/Util_MagCalibration.cpp
@@ -1,180 +1,180 @@
-/************************************************************************************
-
-Filename : Util_MagCalibration.cpp
-Content : Procedures for calibrating the magnetometer
-Created : April 16, 2013
-Authors : Steve LaValle, Andrew Reisse
-
-Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
-
-Use of this software is subject to the terms of the Oculus license
-agreement provided at the time of installation or download, or which
-otherwise accompanies this software in either electronic or hard copy form.
-
-*************************************************************************************/
-
-#include "Util_MagCalibration.h"
-
-namespace OVR { namespace Util {
-
-void MagCalibration::BeginAutoCalibration(SensorFusion& sf)
-{
- Status = Mag_AutoCalibrating;
- // This is a "hard" reset of the mag, so need to clear stored values
- sf.ClearMagCalibration();
- SampleCount = 0;
-}
-
-unsigned MagCalibration::UpdateAutoCalibration(SensorFusion& sf)
-{
- if (Status != Mag_AutoCalibrating)
- return Status;
-
- Quatf q = sf.GetOrientation();
- Vector3f m = sf.GetMagnetometer();
-
- InsertIfAcceptable(q, m);
-
- if ((SampleCount == 4) && (Status == Mag_AutoCalibrating))
- SetCalibration(sf);
-
- return Status;
-
-}
-
-void MagCalibration::BeginManualCalibration(SensorFusion& sf)
-{
- Status = Mag_ManuallyCalibrating;
- sf.ClearMagCalibration();
- SampleCount = 0;
-}
-
-bool MagCalibration::IsAcceptableSample(const Quatf& q, const Vector3f& m)
-{
- switch (SampleCount)
- {
- // Initial sample is always acceptable
- case 0:
- return true;
- break;
- case 1:
- return (q.DistanceSq(QuatSamples[0]) > MinQuatDistanceSq)&&
- ((m - MagSamples[0]).LengthSq() > MinMagDistanceSq);
- break;
- case 2:
- return (q.DistanceSq(QuatSamples[0]) > MinQuatDistanceSq)&&
- (q.DistanceSq(QuatSamples[1]) > MinQuatDistanceSq)&&
- ((m - MagSamples[0]).LengthSq() > MinMagDistanceSq)&&
- ((m - MagSamples[1]).LengthSq() > MinMagDistanceSq);
- break;
- case 3:
- return (q.DistanceSq(QuatSamples[0]) > MinQuatDistanceSq)&&
- (q.DistanceSq(QuatSamples[1]) > MinQuatDistanceSq)&&
- (q.DistanceSq(QuatSamples[2]) > MinQuatDistanceSq)&&
- ((PointToPlaneDistance(MagSamples[0],MagSamples[1],MagSamples[2],m) > MinMagDistance)||
- (PointToPlaneDistance(MagSamples[1],MagSamples[2],m,MagSamples[0]) > MinMagDistance)||
- (PointToPlaneDistance(MagSamples[2],m,MagSamples[0],MagSamples[1]) > MinMagDistance)||
- (PointToPlaneDistance(m,MagSamples[0],MagSamples[1],MagSamples[2]) > MinMagDistance));
- }
-
- return false;
-}
-
-
-bool MagCalibration::InsertIfAcceptable(const Quatf& q, const Vector3f& m)
-{
- if (IsAcceptableSample(q, m))
- {
- MagSamples[SampleCount] = m;
- QuatSamples[SampleCount] = q;
- SampleCount++;
- return true;
- }
-
- return false;
-}
-
-
-bool MagCalibration::SetCalibration(SensorFusion& sf)
-{
- if (SampleCount < 4)
- return false;
-
- MagCenter = CalculateSphereCenter(MagSamples[0],MagSamples[1],MagSamples[2],MagSamples[3]);
- Matrix4f calMat = Matrix4f();
- calMat.M[0][3] = -MagCenter.x;
- calMat.M[1][3] = -MagCenter.y;
- calMat.M[2][3] = -MagCenter.z;
- sf.SetMagCalibration(calMat);
- Status = Mag_Calibrated;
- //LogText("MagCenter: %f %f %f\n",MagCenter.x,MagCenter.y,MagCenter.z);
-
- return true;
-}
-
-
-// Calculate the center of a sphere that passes through p1, p2, p3, p4
-Vector3f MagCalibration::CalculateSphereCenter(const Vector3f& p1, const Vector3f& p2,
- const Vector3f& p3, const Vector3f& p4)
-{
- Matrix4f A;
- int i;
- Vector3f p[4];
- p[0] = p1;
- p[1] = p2;
- p[2] = p3;
- p[3] = p4;
-
- for (i = 0; i < 4; i++)
- {
- A.M[i][0] = p[i].x;
- A.M[i][1] = p[i].y;
- A.M[i][2] = p[i].z;
- A.M[i][3] = 1.0f;
- }
- float m11 = A.Determinant();
- OVR_ASSERT(m11 != 0.0f);
-
- for (i = 0; i < 4; i++)
- {
- A.M[i][0] = p[i].x*p[i].x + p[i].y*p[i].y + p[i].z*p[i].z;
- A.M[i][1] = p[i].y;
- A.M[i][2] = p[i].z;
- A.M[i][3] = 1.0f;
- }
- float m12 = A.Determinant();
-
- for (i = 0; i < 4; i++)
- {
- A.M[i][0] = p[i].x*p[i].x + p[i].y*p[i].y + p[i].z*p[i].z;
- A.M[i][1] = p[i].x;
- A.M[i][2] = p[i].z;
- A.M[i][3] = 1.0f;
- }
- float m13 = A.Determinant();
-
- for (i = 0; i < 4; i++)
- {
- A.M[i][0] = p[i].x*p[i].x + p[i].y*p[i].y + p[i].z*p[i].z;
- A.M[i][1] = p[i].x;
- A.M[i][2] = p[i].y;
- A.M[i][3] = 1.0f;
- }
- float m14 = A.Determinant();
-
- float c = 0.5f / m11;
- return Vector3f(c*m12, -c*m13, c*m14);
-}
-
-// Distance from p4 to the nearest point on a plane through p1, p2, p3
-float MagCalibration::PointToPlaneDistance(const Vector3f& p1, const Vector3f& p2,
- const Vector3f& p3, const Vector3f& p4)
-{
- Vector3f v1 = p1 - p2;
- Vector3f v2 = p1 - p3;
- Vector3f planeNormal = v1.Cross(v2);
- planeNormal.Normalize();
- return (fabs((planeNormal * p4) - planeNormal * p1));
-}
-
-}}
+/************************************************************************************
+
+Filename : Util_MagCalibration.cpp
+Content : Procedures for calibrating the magnetometer
+Created : April 16, 2013
+Authors : Steve LaValle, Andrew Reisse
+
+Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
+
+Use of this software is subject to the terms of the Oculus license
+agreement provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+*************************************************************************************/
+
+#include "Util_MagCalibration.h"
+
+namespace OVR { namespace Util {
+
+void MagCalibration::BeginAutoCalibration(SensorFusion& sf)
+{
+ Stat = Mag_AutoCalibrating;
+ // This is a "hard" reset of the mag, so need to clear stored values
+ sf.ClearMagCalibration();
+ SampleCount = 0;
+}
+
+unsigned MagCalibration::UpdateAutoCalibration(SensorFusion& sf)
+{
+ if (Stat != Mag_AutoCalibrating)
+ return Stat;
+
+ Quatf q = sf.GetOrientation();
+ Vector3f m = sf.GetMagnetometer();
+
+ InsertIfAcceptable(q, m);
+
+ if ((SampleCount == 4) && (Stat == Mag_AutoCalibrating))
+ SetCalibration(sf);
+
+ return Stat;
+
+}
+
+void MagCalibration::BeginManualCalibration(SensorFusion& sf)
+{
+ Stat = Mag_ManuallyCalibrating;
+ sf.ClearMagCalibration();
+ SampleCount = 0;
+}
+
+bool MagCalibration::IsAcceptableSample(const Quatf& q, const Vector3f& m)
+{
+ switch (SampleCount)
+ {
+ // Initial sample is always acceptable
+ case 0:
+ return true;
+ break;
+ case 1:
+ return (q.DistanceSq(QuatSamples[0]) > MinQuatDistanceSq)&&
+ ((m - MagSamples[0]).LengthSq() > MinMagDistanceSq);
+ break;
+ case 2:
+ return (q.DistanceSq(QuatSamples[0]) > MinQuatDistanceSq)&&
+ (q.DistanceSq(QuatSamples[1]) > MinQuatDistanceSq)&&
+ ((m - MagSamples[0]).LengthSq() > MinMagDistanceSq)&&
+ ((m - MagSamples[1]).LengthSq() > MinMagDistanceSq);
+ break;
+ case 3:
+ return (q.DistanceSq(QuatSamples[0]) > MinQuatDistanceSq)&&
+ (q.DistanceSq(QuatSamples[1]) > MinQuatDistanceSq)&&
+ (q.DistanceSq(QuatSamples[2]) > MinQuatDistanceSq)&&
+ ((PointToPlaneDistance(MagSamples[0],MagSamples[1],MagSamples[2],m) > MinMagDistance)||
+ (PointToPlaneDistance(MagSamples[1],MagSamples[2],m,MagSamples[0]) > MinMagDistance)||
+ (PointToPlaneDistance(MagSamples[2],m,MagSamples[0],MagSamples[1]) > MinMagDistance)||
+ (PointToPlaneDistance(m,MagSamples[0],MagSamples[1],MagSamples[2]) > MinMagDistance));
+ }
+
+ return false;
+}
+
+
+bool MagCalibration::InsertIfAcceptable(const Quatf& q, const Vector3f& m)
+{
+ if (IsAcceptableSample(q, m))
+ {
+ MagSamples[SampleCount] = m;
+ QuatSamples[SampleCount] = q;
+ SampleCount++;
+ return true;
+ }
+
+ return false;
+}
+
+
+bool MagCalibration::SetCalibration(SensorFusion& sf)
+{
+ if (SampleCount < 4)
+ return false;
+
+ MagCenter = CalculateSphereCenter(MagSamples[0],MagSamples[1],MagSamples[2],MagSamples[3]);
+ Matrix4f calMat = Matrix4f();
+ calMat.M[0][3] = -MagCenter.x;
+ calMat.M[1][3] = -MagCenter.y;
+ calMat.M[2][3] = -MagCenter.z;
+ sf.SetMagCalibration(calMat);
+ Stat = Mag_Calibrated;
+ //LogText("MagCenter: %f %f %f\n",MagCenter.x,MagCenter.y,MagCenter.z);
+
+ return true;
+}
+
+
+// Calculate the center of a sphere that passes through p1, p2, p3, p4
+Vector3f MagCalibration::CalculateSphereCenter(const Vector3f& p1, const Vector3f& p2,
+ const Vector3f& p3, const Vector3f& p4)
+{
+ Matrix4f A;
+ int i;
+ Vector3f p[4];
+ p[0] = p1;
+ p[1] = p2;
+ p[2] = p3;
+ p[3] = p4;
+
+ for (i = 0; i < 4; i++)
+ {
+ A.M[i][0] = p[i].x;
+ A.M[i][1] = p[i].y;
+ A.M[i][2] = p[i].z;
+ A.M[i][3] = 1.0f;
+ }
+ float m11 = A.Determinant();
+ OVR_ASSERT(m11 != 0.0f);
+
+ for (i = 0; i < 4; i++)
+ {
+ A.M[i][0] = p[i].x*p[i].x + p[i].y*p[i].y + p[i].z*p[i].z;
+ A.M[i][1] = p[i].y;
+ A.M[i][2] = p[i].z;
+ A.M[i][3] = 1.0f;
+ }
+ float m12 = A.Determinant();
+
+ for (i = 0; i < 4; i++)
+ {
+ A.M[i][0] = p[i].x*p[i].x + p[i].y*p[i].y + p[i].z*p[i].z;
+ A.M[i][1] = p[i].x;
+ A.M[i][2] = p[i].z;
+ A.M[i][3] = 1.0f;
+ }
+ float m13 = A.Determinant();
+
+ for (i = 0; i < 4; i++)
+ {
+ A.M[i][0] = p[i].x*p[i].x + p[i].y*p[i].y + p[i].z*p[i].z;
+ A.M[i][1] = p[i].x;
+ A.M[i][2] = p[i].y;
+ A.M[i][3] = 1.0f;
+ }
+ float m14 = A.Determinant();
+
+ float c = 0.5f / m11;
+ return Vector3f(c*m12, -c*m13, c*m14);
+}
+
+// Distance from p4 to the nearest point on a plane through p1, p2, p3
+float MagCalibration::PointToPlaneDistance(const Vector3f& p1, const Vector3f& p2,
+ const Vector3f& p3, const Vector3f& p4)
+{
+ Vector3f v1 = p1 - p2;
+ Vector3f v2 = p1 - p3;
+ Vector3f planeNormal = v1.Cross(v2);
+ planeNormal.Normalize();
+ return (fabs((planeNormal * p4) - planeNormal * p1));
+}
+
+}}
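
For reference, CalculateSphereCenter() above appears to implement the standard determinant formula for the circumsphere of four non-coplanar points. Writing each sample as p_i = (x_i, y_i, z_i) with s_i = x_i^2 + y_i^2 + z_i^2, the four 4x4 determinants the code names m11..m14 (one row per point, i = 1..4) and the resulting center are

    m_{11} = \det[\, x_i \ \ y_i \ \ z_i \ \ 1 \,], \qquad
    m_{12} = \det[\, s_i \ \ y_i \ \ z_i \ \ 1 \,],
    m_{13} = \det[\, s_i \ \ x_i \ \ z_i \ \ 1 \,], \qquad
    m_{14} = \det[\, s_i \ \ x_i \ \ y_i \ \ 1 \,],

    \text{center} = \left( \frac{m_{12}}{2\,m_{11}},\ -\frac{m_{13}}{2\,m_{11}},\ \frac{m_{14}}{2\,m_{11}} \right),

which matches the returned Vector3f(c*m12, -c*m13, c*m14) with c = 0.5f / m11. m11 is zero exactly when the four points are coplanar, which is the degenerate case the OVR_ASSERT guards against.
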
diff --git a/LibOVR/Src/Util/Util_MagCalibration.h b/LibOVR/Src/Util/Util_MagCalibration.h
index 9371125..fd26d22 100644
--- a/LibOVR/Src/Util/Util_MagCalibration.h
+++ b/LibOVR/Src/Util/Util_MagCalibration.h
@@ -1,115 +1,121 @@
-/************************************************************************************
-
-PublicHeader: OVR.h
-Filename : Util_MagCalibration.h
-Content : Procedures for calibrating the magnetometer
-Created : April 16, 2013
-Authors : Steve LaValle, Andrew Reisse
-
-Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
-
-Use of this software is subject to the terms of the Oculus license
-agreement provided at the time of installation or download, or which
-otherwise accompanies this software in either electronic or hard copy form.
-
-*************************************************************************************/
-
-#ifndef OVR_Util_MagCalibration_h
-#define OVR_Util_MagCalibration_h
-
-#include "../OVR_SensorFusion.h"
-#include "../Kernel/OVR_String.h"
-#include "../Kernel/OVR_Log.h"
-
-namespace OVR { namespace Util {
-
-class MagCalibration
-{
-public:
- enum MagStatus
- {
- Mag_Uninitialized = 0,
- Mag_AutoCalibrating = 1,
- Mag_ManuallyCalibrating = 2,
- Mag_Calibrated = 3,
- };
-
- MagCalibration() :
- Status(Mag_Uninitialized),
- MinMagDistance(0.3f), MinQuatDistance(0.5f),
- SampleCount(0)
- {
- MinMagDistanceSq = MinMagDistance * MinMagDistance;
- MinQuatDistanceSq = MinQuatDistance * MinQuatDistance;
- }
-
- // Methods that are useful for either auto or manual calibration
- bool IsUnitialized() const { return Status == Mag_Uninitialized; }
- bool IsCalibrated() const { return Status == Mag_Calibrated; }
- int NumberOfSamples() const { return SampleCount; }
- int RequiredSampleCount() const { return 4; }
- void ClearCalibration(SensorFusion& sf)
- {
- Status = Mag_Uninitialized;
- SampleCount = 0;
- sf.ClearMagCalibration();
- };
-
- // Methods for automatic magnetometer calibration
- void BeginAutoCalibration(SensorFusion& sf);
- unsigned UpdateAutoCalibration(SensorFusion& sf);
- bool IsAutoCalibrating() const { return Status == Mag_AutoCalibrating; }
-
-    // Methods for building a manual (user-guided) calibration procedure
- void BeginManualCalibration(SensorFusion& sf);
- bool IsAcceptableSample(const Quatf& q, const Vector3f& m);
- bool InsertIfAcceptable(const Quatf& q, const Vector3f& m);
- // Returns true if successful, requiring that SampleCount = 4
- bool SetCalibration(SensorFusion& sf);
- bool IsManuallyCalibrating() const { return Status == Mag_ManuallyCalibrating; }
-
- // This is the minimum acceptable distance (Euclidean) between raw
- // magnetometer values to be acceptable for usage in calibration.
- void SetMinMagDistance(float dist)
- {
- MinMagDistance = dist;
- MinMagDistanceSq = MinMagDistance * MinMagDistance;
- }
-
- // The minimum acceptable distance (4D Euclidean) between orientations
- // to be acceptable for calibration usage.
- void SetMinQuatDistance(float dist)
- {
- MinQuatDistance = dist;
- MinQuatDistanceSq = MinQuatDistance * MinQuatDistance;
- }
-
- // A result of the calibration, which is the center of a sphere that
- // roughly approximates the magnetometer data.
- Vector3f GetMagCenter() const { return MagCenter; }
-
-private:
- // Determine the unique sphere through 4 non-coplanar points
- Vector3f CalculateSphereCenter(const Vector3f& p1, const Vector3f& p2,
- const Vector3f& p3, const Vector3f& p4);
-
- // Distance from p4 to the nearest point on a plane through p1, p2, p3
- float PointToPlaneDistance(const Vector3f& p1, const Vector3f& p2,
- const Vector3f& p3, const Vector3f& p4);
-
- Vector3f MagCenter;
- unsigned Status;
- float MinMagDistance;
- float MinQuatDistance;
- float MinMagDistanceSq;
- float MinQuatDistanceSq;
-
- unsigned SampleCount;
- Vector3f MagSamples[4];
- Quatf QuatSamples[4];
-
-};
-
-}}
-
-#endif
+/************************************************************************************
+
+PublicHeader: OVR.h
+Filename : Util_MagCalibration.h
+Content : Procedures for calibrating the magnetometer
+Created : April 16, 2013
+Authors : Steve LaValle, Andrew Reisse
+
+Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
+
+Use of this software is subject to the terms of the Oculus license
+agreement provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+*************************************************************************************/
+
+#ifndef OVR_Util_MagCalibration_h
+#define OVR_Util_MagCalibration_h
+
+#include "../OVR_SensorFusion.h"
+#include "../Kernel/OVR_String.h"
+#include "../Kernel/OVR_Log.h"
+
+namespace OVR { namespace Util {
+
+class MagCalibration
+{
+public:
+ enum MagStatus
+ {
+ Mag_Uninitialized = 0,
+ Mag_AutoCalibrating = 1,
+ Mag_ManuallyCalibrating = 2,
+ Mag_Calibrated = 3
+ };
+
+ MagCalibration() :
+ Stat(Mag_Uninitialized),
+ MinMagDistance(0.2f), MinQuatDistance(0.5f),
+ SampleCount(0)
+ {
+ MinMagDistanceSq = MinMagDistance * MinMagDistance;
+ MinQuatDistanceSq = MinQuatDistance * MinQuatDistance;
+ }
+
+ // Methods that are useful for either auto or manual calibration
+ bool IsUnitialized() const { return Stat == Mag_Uninitialized; }
+ bool IsCalibrated() const { return Stat == Mag_Calibrated; }
+ int NumberOfSamples() const { return SampleCount; }
+ int RequiredSampleCount() const { return 4; }
+ void AbortCalibration()
+ {
+ Stat = Mag_Uninitialized;
+ SampleCount = 0;
+ }
+
+ void ClearCalibration(SensorFusion& sf)
+ {
+ Stat = Mag_Uninitialized;
+ SampleCount = 0;
+ sf.ClearMagCalibration();
+ };
+
+ // Methods for automatic magnetometer calibration
+ void BeginAutoCalibration(SensorFusion& sf);
+ unsigned UpdateAutoCalibration(SensorFusion& sf);
+ bool IsAutoCalibrating() const { return Stat == Mag_AutoCalibrating; }
+
+    // Methods for building a manual (user-guided) calibration procedure
+ void BeginManualCalibration(SensorFusion& sf);
+ bool IsAcceptableSample(const Quatf& q, const Vector3f& m);
+ bool InsertIfAcceptable(const Quatf& q, const Vector3f& m);
+ // Returns true if successful, requiring that SampleCount = 4
+ bool SetCalibration(SensorFusion& sf);
+ bool IsManuallyCalibrating() const { return Stat == Mag_ManuallyCalibrating; }
+
+    // The minimum distance (Euclidean) between raw magnetometer values
+    // required for a new sample to be accepted for calibration.
+ void SetMinMagDistance(float dist)
+ {
+ MinMagDistance = dist;
+ MinMagDistanceSq = MinMagDistance * MinMagDistance;
+ }
+
+    // The minimum distance (4D Euclidean) between orientations required
+    // for a new sample to be accepted for calibration.
+ void SetMinQuatDistance(float dist)
+ {
+ MinQuatDistance = dist;
+ MinQuatDistanceSq = MinQuatDistance * MinQuatDistance;
+ }
+
+ // A result of the calibration, which is the center of a sphere that
+ // roughly approximates the magnetometer data.
+ Vector3f GetMagCenter() const { return MagCenter; }
+
+private:
+ // Determine the unique sphere through 4 non-coplanar points
+ Vector3f CalculateSphereCenter(const Vector3f& p1, const Vector3f& p2,
+ const Vector3f& p3, const Vector3f& p4);
+
+ // Distance from p4 to the nearest point on a plane through p1, p2, p3
+ float PointToPlaneDistance(const Vector3f& p1, const Vector3f& p2,
+ const Vector3f& p3, const Vector3f& p4);
+
+ Vector3f MagCenter;
+ unsigned Stat;
+ float MinMagDistance;
+ float MinQuatDistance;
+ float MinMagDistanceSq;
+ float MinQuatDistanceSq;
+
+ unsigned SampleCount;
+ Vector3f MagSamples[4];
+ Quatf QuatSamples[4];
+
+};
+
+}}
+
+#endif
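
A minimal sketch of how the manual calibration path declared above might be driven; this is
illustrative only and not part of the patch. It assumes the application owns an OVR::SensorFusion
(`sf`) and can supply the current orientation `q` and raw magnetometer sample `m` each frame; those
inputs are placeholders, and only the MagCalibration calls come from this header.

    using namespace OVR;

    Util::MagCalibration MagCal;

    void StartManualMagCal(SensorFusion& sf)
    {
        MagCal.BeginManualCalibration(sf);
    }

    void OnFrame(SensorFusion& sf, const Quatf& q, const Vector3f& m)
    {
        if (!MagCal.IsManuallyCalibrating())
            return;

        // Stored only if far enough from earlier samples
        // (see SetMinMagDistance / SetMinQuatDistance above).
        MagCal.InsertIfAcceptable(q, m);

        if (MagCal.NumberOfSamples() == MagCal.RequiredSampleCount() &&
            MagCal.SetCalibration(sf))
        {
            Vector3f center = MagCal.GetMagCenter(); // fitted sphere center
            (void)center;                            // e.g. report to the user
        }
    }
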
diff --git a/LibOVR/Src/Util/Util_Render_Stereo.cpp b/LibOVR/Src/Util/Util_Render_Stereo.cpp
index 8986e30..b16cfb5 100644
--- a/LibOVR/Src/Util/Util_Render_Stereo.cpp
+++ b/LibOVR/Src/Util/Util_Render_Stereo.cpp
@@ -1,311 +1,314 @@
-/************************************************************************************
-
-Filename : Util_Render_Stereo.cpp
-Content : Stereo rendering configuration implementation
-Created : October 22, 2012
-Authors : Michael Antonov, Andrew Reisse
-
-Copyright : Copyright 2012 Oculus, Inc. All Rights reserved.
-
-Use of this software is subject to the terms of the Oculus Inc license
-agreement provided at the time of installation or download, or which
-otherwise accompanies this software in either electronic or hard copy form.
-
-*************************************************************************************/
-
-#include "Util_Render_Stereo.h"
-
-namespace OVR { namespace Util { namespace Render {
-
-
-//-----------------------------------------------------------------------------------
-
-// DistortionFnInverse computes the inverse of the distortion function on an argument.
-float DistortionConfig::DistortionFnInverse(float r)
-{
- OVR_ASSERT((r <= 10.0f));
-
- float s, d;
- float delta = r * 0.25f;
-
- s = r * 0.5f;
- d = fabs(r - DistortionFn(s));
-
- for (int i = 0; i < 20; i++)
- {
- float sUp = s + delta;
- float sDown = s - delta;
- float dUp = fabs(r - DistortionFn(sUp));
- float dDown = fabs(r - DistortionFn(sDown));
-
- if (dUp < d)
- {
- s = sUp;
- d = dUp;
- }
- else if (dDown < d)
- {
- s = sDown;
- d = dDown;
- }
- else
- {
- delta *= 0.5f;
- }
- }
-
- return s;
-}
-
-
-//-----------------------------------------------------------------------------------
-// **** StereoConfig Implementation
-
-StereoConfig::StereoConfig(StereoMode mode, const Viewport& vp)
- : Mode(mode),
- InterpupillaryDistance(0.064f), AspectMultiplier(1.0f),
- FullView(vp), DirtyFlag(true),
- YFov(0), Aspect(vp.w / float(vp.h)), ProjectionCenterOffset(0),
- OrthoPixelOffset(0)
-{
- // And default distortion for it.
- Distortion.SetCoefficients(1.0f, 0.22f, 0.24f);
- Distortion.Scale = 1.0f; // Will be computed later.
-
- // Fit left of the image.
- DistortionFitX = -1.0f;
- DistortionFitY = 0.0f;
-
- // Initialize "fake" default HMD values for testing without HMD plugged in.
- // These default values match those returned by the HMD.
- HMD.HResolution = 1280;
- HMD.VResolution = 800;
- HMD.HScreenSize = 0.14976f;
- HMD.VScreenSize = HMD.HScreenSize / (1280.0f / 800.0f);
- HMD.InterpupillaryDistance = InterpupillaryDistance;
- HMD.LensSeparationDistance = 0.0635f;
- HMD.EyeToScreenDistance = 0.041f;
- HMD.DistortionK[0] = Distortion.K[0];
- HMD.DistortionK[1] = Distortion.K[1];
- HMD.DistortionK[2] = Distortion.K[2];
- HMD.DistortionK[3] = 0;
-
- Set2DAreaFov(DegreeToRad(85.0f));
-}
-
-void StereoConfig::SetFullViewport(const Viewport& vp)
-{
- if (vp != FullView)
- {
- FullView = vp;
- DirtyFlag = true;
- }
-}
-
-void StereoConfig::SetHMDInfo(const HMDInfo& hmd)
-{
- HMD = hmd;
- Distortion.K[0] = hmd.DistortionK[0];
- Distortion.K[1] = hmd.DistortionK[1];
- Distortion.K[2] = hmd.DistortionK[2];
- Distortion.K[3] = hmd.DistortionK[3];
-
- Distortion.SetChromaticAberration(hmd.ChromaAbCorrection[0], hmd.ChromaAbCorrection[1],
- hmd.ChromaAbCorrection[2], hmd.ChromaAbCorrection[3]);
-
- DirtyFlag = true;
-}
-
-void StereoConfig::SetDistortionFitPointVP(float x, float y)
-{
- DistortionFitX = x;
- DistortionFitY = y;
- DirtyFlag = true;
-}
-
-void StereoConfig::SetDistortionFitPointPixels(float x, float y)
-{
- DistortionFitX = (4 * x / float(FullView.w)) - 1.0f;
- DistortionFitY = (2 * y / float(FullView.h)) - 1.0f;
- DirtyFlag = true;
-}
-
-void StereoConfig::Set2DAreaFov(float fovRadians)
-{
- Area2DFov = fovRadians;
- DirtyFlag = true;
-}
-
-
-const StereoEyeParams& StereoConfig::GetEyeRenderParams(StereoEye eye)
-{
- static const UByte eyeParamIndices[3] = { 0, 0, 1 };
-
- updateIfDirty();
- OVR_ASSERT(eye < sizeof(eyeParamIndices));
- return EyeRenderParams[eyeParamIndices[eye]];
-}
-
-
-void StereoConfig::updateComputedState()
-{
- // Need to compute all of the following:
- // - Aspect Ratio
- // - FOV
- // - Projection offsets for 3D
- // - Distortion XCenterOffset
- // - Update 2D
- // - Initialize EyeRenderParams
-
- // Compute aspect ratio. Stereo mode cuts width in half.
- Aspect = float(FullView.w) / float(FullView.h);
- Aspect *= (Mode == Stereo_None) ? 1.0f : 0.5f;
- Aspect *= AspectMultiplier;
-
- updateDistortionOffsetAndScale();
-
- // Compute Vertical FOV based on distance, distortion, etc.
- // Distance from vertical center to render vertical edge perceived through the lens.
- // This will be larger then normal screen size due to magnification & distortion.
- //
- // This percievedHalfRTDistance equation should hold as long as the render target
- // and display have the same aspect ratios. What we'd like to know is where the edge
- // of the render target will on the perceived screen surface. With NO LENS,
- // the answer would be:
- //
- // halfRTDistance = (VScreenSize / 2) * aspect *
- // DistortionFn_Inverse( DistortionScale / aspect )
- //
- // To model the optical lens we eliminates DistortionFn_Inverse. Aspect ratios
- // cancel out, so we get:
- //
- // halfRTDistance = (VScreenSize / 2) * DistortionScale
- //
- if (Mode == Stereo_None)
- {
- YFov = DegreeToRad(80.0f);
- }
- else
- {
- float percievedHalfRTDistance = (HMD.VScreenSize / 2) * Distortion.Scale;
- YFov = 2.0f * atan(percievedHalfRTDistance/HMD.EyeToScreenDistance);
- }
-
- updateProjectionOffset();
- update2D();
- updateEyeParams();
-
- DirtyFlag = false;
-}
-
-void StereoConfig::updateDistortionOffsetAndScale()
-{
- // Distortion center shift is stored separately, since it isn't affected
- // by the eye distance.
- float lensOffset = HMD.LensSeparationDistance * 0.5f;
- float lensShift = HMD.HScreenSize * 0.25f - lensOffset;
- float lensViewportShift = 4.0f * lensShift / HMD.HScreenSize;
- Distortion.XCenterOffset= lensViewportShift;
-
- // Compute distortion scale from DistortionFitX & DistortionFitY.
- // Fit value of 0.0 means "no fit".
- if ((fabs(DistortionFitX) < 0.0001f) && (fabs(DistortionFitY) < 0.0001f))
- {
- Distortion.Scale = 1.0f;
- }
- else
- {
- // Convert fit value to distortion-centered coordinates before fit radius
- // calculation.
- float stereoAspect = 0.5f * float(FullView.w) / float(FullView.h);
- float dx = DistortionFitX - Distortion.XCenterOffset;
- float dy = DistortionFitY / stereoAspect;
- float fitRadius = sqrt(dx * dx + dy * dy);
- Distortion.Scale = Distortion.DistortionFn(fitRadius)/fitRadius;
- }
-}
-
-void StereoConfig::updateProjectionOffset()
-{
- // Post-projection viewport coordinates range from (-1.0, 1.0), with the
- // center of the left viewport falling at (1/4) of horizontal screen size.
- // We need to shift this projection center to match with the lens center;
- // note that we don't use the IPD here due to collimated light property of the lens.
- // We compute this shift in physical units (meters) to
- // correct for different screen sizes and then rescale to viewport coordinates.
- float viewCenter = HMD.HScreenSize * 0.25f;
- float eyeProjectionShift = viewCenter - HMD.LensSeparationDistance*0.5f;
- ProjectionCenterOffset = 4.0f * eyeProjectionShift / HMD.HScreenSize;
-}
-
-void StereoConfig::update2D()
-{
- // Orthographic projection fakes a screen at a distance of 0.8m from the
- // eye, where hmd screen projection surface is at 0.05m distance.
- // This introduces an extra off-center pixel projection shift based on eye distance.
- // This offCenterShift is the pixel offset of the other camera's center
- // in your reference camera based on surface distance.
- float metersToPixels = (HMD.HResolution / HMD.HScreenSize);
- float lensDistanceScreenPixels= metersToPixels * HMD.LensSeparationDistance;
- float eyeDistanceScreenPixels = metersToPixels * InterpupillaryDistance;
- float offCenterShiftPixels = (HMD.EyeToScreenDistance / 0.8f) * eyeDistanceScreenPixels;
- float leftPixelCenter = (HMD.HResolution / 2) - lensDistanceScreenPixels * 0.5f;
- float rightPixelCenter = lensDistanceScreenPixels * 0.5f;
- float pixelDifference = leftPixelCenter - rightPixelCenter;
-
- // This computes the number of pixels that fit within specified 2D FOV (assuming
- // distortion scaling will be done).
- float percievedHalfScreenDistance = tan(Area2DFov * 0.5f) * HMD.EyeToScreenDistance;
- float vfovSize = 2.0f * percievedHalfScreenDistance / Distortion.Scale;
- FovPixels = HMD.VResolution * vfovSize / HMD.VScreenSize;
-
- // Create orthographic matrix.
- Matrix4f& m = OrthoCenter;
- m.SetIdentity();
- m.M[0][0] = FovPixels / (FullView.w * 0.5f);
- m.M[1][1] = -FovPixels / FullView.h;
- m.M[0][3] = 0;
- m.M[1][3] = 0;
- m.M[2][2] = 0;
-
- float orthoPixelOffset = (pixelDifference + offCenterShiftPixels/Distortion.Scale) * 0.5f;
- OrthoPixelOffset = orthoPixelOffset * 2.0f / FovPixels;
-}
-
-void StereoConfig::updateEyeParams()
-{
- // Projection matrix for the center eye, which the left/right matrices are based on.
- Matrix4f projCenter = Matrix4f::PerspectiveRH(YFov, Aspect, 0.01f, 1000.0f);
-
- switch(Mode)
- {
- case Stereo_None:
- {
- EyeRenderParams[0].Init(StereoEye_Center, FullView, 0, projCenter, OrthoCenter);
- }
- break;
-
- case Stereo_LeftRight_Multipass:
- {
- Matrix4f projLeft = Matrix4f::Translation(ProjectionCenterOffset, 0, 0) * projCenter,
- projRight = Matrix4f::Translation(-ProjectionCenterOffset, 0, 0) * projCenter;
-
- EyeRenderParams[0].Init(StereoEye_Left,
- Viewport(FullView.x, FullView.y, FullView.w/2, FullView.h),
- +InterpupillaryDistance * 0.5f, // World view shift.
- projLeft, OrthoCenter * Matrix4f::Translation(OrthoPixelOffset, 0, 0),
- &Distortion);
- EyeRenderParams[1].Init(StereoEye_Right,
- Viewport(FullView.x + FullView.w/2, FullView.y, FullView.w/2, FullView.h),
- -InterpupillaryDistance * 0.5f,
- projRight, OrthoCenter * Matrix4f::Translation(-OrthoPixelOffset, 0, 0),
- &Distortion);
- }
- break;
- }
-
-}
-
-
-}}} // OVR::Util::Render
-
+/************************************************************************************
+
+Filename : Util_Render_Stereo.cpp
+Content : Stereo rendering configuration implementation
+Created : October 22, 2012
+Authors : Michael Antonov, Andrew Reisse
+
+Copyright : Copyright 2012 Oculus, Inc. All Rights reserved.
+
+Use of this software is subject to the terms of the Oculus Inc license
+agreement provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+*************************************************************************************/
+
+#include "Util_Render_Stereo.h"
+
+namespace OVR { namespace Util { namespace Render {
+
+
+//-----------------------------------------------------------------------------------
+
+// DistortionFnInverse computes the inverse of the distortion function on an argument.
+float DistortionConfig::DistortionFnInverse(float r)
+{
+ OVR_ASSERT((r <= 10.0f));
+
+ float s, d;
+ float delta = r * 0.25f;
+
+ s = r * 0.5f;
+ d = fabs(r - DistortionFn(s));
+
+ for (int i = 0; i < 20; i++)
+ {
+ float sUp = s + delta;
+ float sDown = s - delta;
+ float dUp = fabs(r - DistortionFn(sUp));
+ float dDown = fabs(r - DistortionFn(sDown));
+
+ if (dUp < d)
+ {
+ s = sUp;
+ d = dUp;
+ }
+ else if (dDown < d)
+ {
+ s = sDown;
+ d = dDown;
+ }
+ else
+ {
+ delta *= 0.5f;
+ }
+ }
+
+ return s;
+}
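
The loop above is a simple derivative-free search: it probes s +/- delta, keeps whichever probe
reduces the error |r - DistortionFn(s)|, and halves delta when neither does, so 20 iterations are
ample for the radii used here. A quick sanity check (illustrative only, assuming the default
coefficients the StereoConfig constructor below installs) is to verify the round trip:

    DistortionConfig dc;
    dc.SetCoefficients(1.0f, 0.22f, 0.24f);  // defaults used by StereoConfig below
    float r    = 1.0f;                       // distorted radius
    float s    = dc.DistortionFnInverse(r);  // undistorted radius
    float back = dc.DistortionFn(s);         // expected to land very close to r
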
+
+
+//-----------------------------------------------------------------------------------
+// **** StereoConfig Implementation
+
+StereoConfig::StereoConfig(StereoMode mode, const Viewport& vp)
+ : Mode(mode),
+ InterpupillaryDistance(0.064f), AspectMultiplier(1.0f),
+ FullView(vp), DirtyFlag(true), IPDOverride(false),
+ YFov(0), Aspect(vp.w / float(vp.h)), ProjectionCenterOffset(0),
+ OrthoPixelOffset(0)
+{
+ // And default distortion for it.
+ Distortion.SetCoefficients(1.0f, 0.22f, 0.24f);
+ Distortion.Scale = 1.0f; // Will be computed later.
+
+ // Fit left of the image.
+ DistortionFitX = -1.0f;
+ DistortionFitY = 0.0f;
+
+ // Initialize "fake" default HMD values for testing without HMD plugged in.
+ // These default values match those returned by the HMD.
+ HMD.HResolution = 1280;
+ HMD.VResolution = 800;
+ HMD.HScreenSize = 0.14976f;
+ HMD.VScreenSize = HMD.HScreenSize / (1280.0f / 800.0f);
+ HMD.InterpupillaryDistance = InterpupillaryDistance;
+ HMD.LensSeparationDistance = 0.0635f;
+ HMD.EyeToScreenDistance = 0.041f;
+ HMD.DistortionK[0] = Distortion.K[0];
+ HMD.DistortionK[1] = Distortion.K[1];
+ HMD.DistortionK[2] = Distortion.K[2];
+ HMD.DistortionK[3] = 0;
+
+ Set2DAreaFov(DegreeToRad(85.0f));
+}
+
+void StereoConfig::SetFullViewport(const Viewport& vp)
+{
+ if (vp != FullView)
+ {
+ FullView = vp;
+ DirtyFlag = true;
+ }
+}
+
+void StereoConfig::SetHMDInfo(const HMDInfo& hmd)
+{
+ HMD = hmd;
+ Distortion.K[0] = hmd.DistortionK[0];
+ Distortion.K[1] = hmd.DistortionK[1];
+ Distortion.K[2] = hmd.DistortionK[2];
+ Distortion.K[3] = hmd.DistortionK[3];
+
+ Distortion.SetChromaticAberration(hmd.ChromaAbCorrection[0], hmd.ChromaAbCorrection[1],
+ hmd.ChromaAbCorrection[2], hmd.ChromaAbCorrection[3]);
+
+ if (!IPDOverride)
+ InterpupillaryDistance = HMD.InterpupillaryDistance;
+
+ DirtyFlag = true;
+}
+
+void StereoConfig::SetDistortionFitPointVP(float x, float y)
+{
+ DistortionFitX = x;
+ DistortionFitY = y;
+ DirtyFlag = true;
+}
+
+void StereoConfig::SetDistortionFitPointPixels(float x, float y)
+{
+ DistortionFitX = (4 * x / float(FullView.w)) - 1.0f;
+ DistortionFitY = (2 * y / float(FullView.h)) - 1.0f;
+ DirtyFlag = true;
+}
+
+void StereoConfig::Set2DAreaFov(float fovRadians)
+{
+ Area2DFov = fovRadians;
+ DirtyFlag = true;
+}
+
+
+const StereoEyeParams& StereoConfig::GetEyeRenderParams(StereoEye eye)
+{
+ static const UByte eyeParamIndices[3] = { 0, 0, 1 };
+
+ updateIfDirty();
+ OVR_ASSERT(eye < sizeof(eyeParamIndices));
+ return EyeRenderParams[eyeParamIndices[eye]];
+}
+
+
+void StereoConfig::updateComputedState()
+{
+ // Need to compute all of the following:
+ // - Aspect Ratio
+ // - FOV
+ // - Projection offsets for 3D
+ // - Distortion XCenterOffset
+ // - Update 2D
+ // - Initialize EyeRenderParams
+
+ // Compute aspect ratio. Stereo mode cuts width in half.
+ Aspect = float(FullView.w) / float(FullView.h);
+ Aspect *= (Mode == Stereo_None) ? 1.0f : 0.5f;
+ Aspect *= AspectMultiplier;
+
+ updateDistortionOffsetAndScale();
+
+ // Compute Vertical FOV based on distance, distortion, etc.
+ // Distance from vertical center to render vertical edge perceived through the lens.
+    // This will be larger than the normal screen size due to magnification & distortion.
+ //
+ // This percievedHalfRTDistance equation should hold as long as the render target
+ // and display have the same aspect ratios. What we'd like to know is where the edge
+    // of the render target will land on the perceived screen surface. With NO LENS,
+ // the answer would be:
+ //
+ // halfRTDistance = (VScreenSize / 2) * aspect *
+ // DistortionFn_Inverse( DistortionScale / aspect )
+ //
+    // To model the optical lens we eliminate DistortionFn_Inverse. Aspect ratios
+ // cancel out, so we get:
+ //
+ // halfRTDistance = (VScreenSize / 2) * DistortionScale
+ //
+ if (Mode == Stereo_None)
+ {
+ YFov = DegreeToRad(80.0f);
+ }
+ else
+ {
+ float percievedHalfRTDistance = (HMD.VScreenSize / 2) * Distortion.Scale;
+ YFov = 2.0f * atan(percievedHalfRTDistance/HMD.EyeToScreenDistance);
+ }
+
+ updateProjectionOffset();
+ update2D();
+ updateEyeParams();
+
+ DirtyFlag = false;
+}
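
A rough worked instance of the FOV formula above (illustrative, using the fake default HMD values
from the constructor): VScreenSize = 0.0936m and EyeToScreenDistance = 0.041m, so with no
distortion scaling (Scale = 1.0) the perceived half height is 0.0468m and
YFov = 2 * atan(0.0468 / 0.041) ~ 1.70 rad, about 98 degrees; the distortion fit normally pushes
Scale above 1 and widens this further.
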
+
+void StereoConfig::updateDistortionOffsetAndScale()
+{
+ // Distortion center shift is stored separately, since it isn't affected
+ // by the eye distance.
+ float lensOffset = HMD.LensSeparationDistance * 0.5f;
+ float lensShift = HMD.HScreenSize * 0.25f - lensOffset;
+ float lensViewportShift = 4.0f * lensShift / HMD.HScreenSize;
+ Distortion.XCenterOffset= lensViewportShift;
+
+ // Compute distortion scale from DistortionFitX & DistortionFitY.
+ // Fit value of 0.0 means "no fit".
+ if ((fabs(DistortionFitX) < 0.0001f) && (fabs(DistortionFitY) < 0.0001f))
+ {
+ Distortion.Scale = 1.0f;
+ }
+ else
+ {
+ // Convert fit value to distortion-centered coordinates before fit radius
+ // calculation.
+ float stereoAspect = 0.5f * float(FullView.w) / float(FullView.h);
+ float dx = DistortionFitX - Distortion.XCenterOffset;
+ float dy = DistortionFitY / stereoAspect;
+ float fitRadius = sqrt(dx * dx + dy * dy);
+ Distortion.Scale = Distortion.DistortionFn(fitRadius)/fitRadius;
+ }
+}
+
+void StereoConfig::updateProjectionOffset()
+{
+ // Post-projection viewport coordinates range from (-1.0, 1.0), with the
+ // center of the left viewport falling at (1/4) of horizontal screen size.
+ // We need to shift this projection center to match with the lens center;
+ // note that we don't use the IPD here due to collimated light property of the lens.
+ // We compute this shift in physical units (meters) to
+ // correct for different screen sizes and then rescale to viewport coordinates.
+ float viewCenter = HMD.HScreenSize * 0.25f;
+ float eyeProjectionShift = viewCenter - HMD.LensSeparationDistance*0.5f;
+ ProjectionCenterOffset = 4.0f * eyeProjectionShift / HMD.HScreenSize;
+}
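
Worked through with the default HMD values (illustrative): viewCenter = 0.14976 * 0.25 = 0.03744m,
eyeProjectionShift = 0.03744 - 0.03175 = 0.00569m, so
ProjectionCenterOffset = 4 * 0.00569 / 0.14976 ~ 0.152 in [-1,1] per-eye viewport units;
updateEyeParams below applies it with opposite signs for the left and right eyes.
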
+
+void StereoConfig::update2D()
+{
+ // Orthographic projection fakes a screen at a distance of 0.8m from the
+    // eye, where the HMD screen projection surface is at 0.05m distance.
+ // This introduces an extra off-center pixel projection shift based on eye distance.
+ // This offCenterShift is the pixel offset of the other camera's center
+ // in your reference camera based on surface distance.
+ float metersToPixels = (HMD.HResolution / HMD.HScreenSize);
+ float lensDistanceScreenPixels= metersToPixels * HMD.LensSeparationDistance;
+ float eyeDistanceScreenPixels = metersToPixels * InterpupillaryDistance;
+ float offCenterShiftPixels = (HMD.EyeToScreenDistance / 0.8f) * eyeDistanceScreenPixels;
+ float leftPixelCenter = (HMD.HResolution / 2) - lensDistanceScreenPixels * 0.5f;
+ float rightPixelCenter = lensDistanceScreenPixels * 0.5f;
+ float pixelDifference = leftPixelCenter - rightPixelCenter;
+
+ // This computes the number of pixels that fit within specified 2D FOV (assuming
+ // distortion scaling will be done).
+ float percievedHalfScreenDistance = tan(Area2DFov * 0.5f) * HMD.EyeToScreenDistance;
+ float vfovSize = 2.0f * percievedHalfScreenDistance / Distortion.Scale;
+ FovPixels = HMD.VResolution * vfovSize / HMD.VScreenSize;
+
+ // Create orthographic matrix.
+ Matrix4f& m = OrthoCenter;
+ m.SetIdentity();
+ m.M[0][0] = FovPixels / (FullView.w * 0.5f);
+ m.M[1][1] = -FovPixels / FullView.h;
+ m.M[0][3] = 0;
+ m.M[1][3] = 0;
+ m.M[2][2] = 0;
+
+ float orthoPixelOffset = (pixelDifference + offCenterShiftPixels/Distortion.Scale) * 0.5f;
+ OrthoPixelOffset = orthoPixelOffset * 2.0f / FovPixels;
+}
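
With the default HMD values (illustrative): metersToPixels ~ 8547 px/m, so
lensDistanceScreenPixels ~ 543 px, eyeDistanceScreenPixels ~ 547 px, offCenterShiftPixels ~ 28 px
and pixelDifference ~ 97 px; the last two lines combine these into OrthoPixelOffset within the
[-1,1] 2D coordinate range defined by FovPixels.
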
+
+void StereoConfig::updateEyeParams()
+{
+ // Projection matrix for the center eye, which the left/right matrices are based on.
+ Matrix4f projCenter = Matrix4f::PerspectiveRH(YFov, Aspect, 0.01f, 1000.0f);
+
+ switch(Mode)
+ {
+ case Stereo_None:
+ {
+ EyeRenderParams[0].Init(StereoEye_Center, FullView, 0, projCenter, OrthoCenter);
+ }
+ break;
+
+ case Stereo_LeftRight_Multipass:
+ {
+ Matrix4f projLeft = Matrix4f::Translation(ProjectionCenterOffset, 0, 0) * projCenter,
+ projRight = Matrix4f::Translation(-ProjectionCenterOffset, 0, 0) * projCenter;
+
+ EyeRenderParams[0].Init(StereoEye_Left,
+ Viewport(FullView.x, FullView.y, FullView.w/2, FullView.h),
+ +InterpupillaryDistance * 0.5f, // World view shift.
+ projLeft, OrthoCenter * Matrix4f::Translation(OrthoPixelOffset, 0, 0),
+ &Distortion);
+ EyeRenderParams[1].Init(StereoEye_Right,
+ Viewport(FullView.x + FullView.w/2, FullView.y, FullView.w/2, FullView.h),
+ -InterpupillaryDistance * 0.5f,
+ projRight, OrthoCenter * Matrix4f::Translation(-OrthoPixelOffset, 0, 0),
+ &Distortion);
+ }
+ break;
+ }
+
+}
+
+
+}}} // OVR::Util::Render
+
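A minimal end-to-end sketch of how this configuration class is typically used (illustrative only,
not part of the patch; `hmdInfo` is assumed to have been filled in elsewhere, e.g. from a detected
HMD device, and only the StereoConfig/StereoEyeParams calls come from the header below):

    using namespace OVR;
    using namespace OVR::Util::Render;

    void SetupStereo(const HMDInfo& hmdInfo)
    {
        StereoConfig stereo(Stereo_LeftRight_Multipass, Viewport(0, 0, 1280, 800));
        stereo.SetHMDInfo(hmdInfo);                  // distortion K's; IPD too, unless overridden
        stereo.SetDistortionFitPointVP(-1.0f, 0.0f); // fit distortion scale to the left edge

        const StereoEyeParams& left  = stereo.GetEyeRenderParams(StereoEye_Left);
        const StereoEyeParams& right = stereo.GetEyeRenderParams(StereoEye_Right);

        // left.VP / right.VP give the per-eye viewports; Projection and ViewAdjust
        // feed the renderer's projection and view matrices; pDistortion drives the shader.
        (void)left; (void)right;
    }
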
diff --git a/LibOVR/Src/Util/Util_Render_Stereo.h b/LibOVR/Src/Util/Util_Render_Stereo.h
index 9dc110c..9ac4002 100644
--- a/LibOVR/Src/Util/Util_Render_Stereo.h
+++ b/LibOVR/Src/Util/Util_Render_Stereo.h
@@ -1,299 +1,300 @@
-/************************************************************************************
-
-PublicHeader: OVR.h
-Filename : Util_Render_Stereo.h
-Content : Sample stereo rendering configuration classes.
-Created : October 22, 2012
-Authors : Michael Antonov
-
-Copyright : Copyright 2012 Oculus, Inc. All Rights reserved.
-
-Use of this software is subject to the terms of the Oculus Inc license
-agreement provided at the time of installation or download, or which
-otherwise accompanies this software in either electronic or hard copy form.
-
-*************************************************************************************/
-
-#ifndef OVR_Util_Render_Stereo_h
-#define OVR_Util_Render_Stereo_h
-
-#include "../OVR_Device.h"
-
-namespace OVR { namespace Util { namespace Render {
-
-
-//-----------------------------------------------------------------------------------
-// ***** Stereo Enumerations
-
-// StereoMode describes rendering modes that can be used by StereoConfig.
-// These modes control whether stereo rendering is used or not (Stereo_None),
-// and how it is implemented.
-enum StereoMode
-{
- Stereo_None = 0,
- Stereo_LeftRight_Multipass = 1
-};
-
-
-// StereoEye specifies which eye we are rendering for; it is used to
-// retrieve StereoEyeParams.
-enum StereoEye
-{
- StereoEye_Center,
- StereoEye_Left,
- StereoEye_Right
-};
-
-
-//-----------------------------------------------------------------------------------
-// ***** Viewport
-
-// Viewport describes a rectangular area used for rendering, in pixels.
-struct Viewport
-{
- int x, y;
- int w, h;
-
- Viewport() {}
- Viewport(int x1, int y1, int w1, int h1) : x(x1), y(y1), w(w1), h(h1) { }
-
- bool operator == (const Viewport& vp) const
- { return (x == vp.x) && (y == vp.y) && (w == vp.w) && (h == vp.h); }
- bool operator != (const Viewport& vp) const
- { return !operator == (vp); }
-};
-
-
-//-----------------------------------------------------------------------------------
-// ***** DistortionConfig
-
-// DistortionConfig Provides controls for the distortion shader.
-// - K[0] - K[3] are coefficients for the distortion function.
-// - XCenterOffset is the offset of lens distortion center from the
-// center of one-eye screen half. [-1, 1] Range.
-// - Scale is a factor of how much larger will the input image be,
-// with a factor of 1.0f being no scaling. An inverse of this
-// value is applied to sampled UV coordinates (1/Scale).
-// - ChromaticAberration is an array of parameters for controlling
-// additional Red and Blue scaling in order to reduce chromatic aberration
-// caused by the Rift lenses.
-class DistortionConfig
-{
-public:
- DistortionConfig(float k0 = 1.0f, float k1 = 0.0f, float k2 = 0.0f, float k3 = 0.0f)
- : XCenterOffset(0), YCenterOffset(0), Scale(1.0f)
- {
- SetCoefficients(k0, k1, k2, k3);
- SetChromaticAberration();
- }
-
- void SetCoefficients(float k0, float k1 = 0.0f, float k2 = 0.0f, float k3 = 0.0f)
- { K[0] = k0; K[1] = k1; K[2] = k2; K[3] = k3; }
-
- void SetChromaticAberration(float red1 = 1.0f, float red2 = 0.0f, float blue1 = 1.0f, float blue2 = 0.0f)
- { ChromaticAberration[0] = red1; ChromaticAberration[1] = red2; ChromaticAberration[2] = blue1; ChromaticAberration[3] = blue2; }
-
-
- // DistortionFn applies distortion equation to the argument. The returned
- // value should match distortion equation used in shader.
- float DistortionFn(float r) const
- {
- float rsq = r * r;
- float scale = r * (K[0] + K[1] * rsq + K[2] * rsq * rsq + K[3] * rsq * rsq * rsq);
- return scale;
- }
-
- // DistortionFnInverse computes the inverse of the distortion function on an argument.
- float DistortionFnInverse(float r);
-
- float K[4];
- float XCenterOffset, YCenterOffset;
- float Scale;
-
- float ChromaticAberration[4]; // Additional per-channel scaling is applied after distortion:
- // Index [0] - Red channel constant coefficient.
- // Index [1] - Red channel r^2 coefficient.
- // Index [2] - Blue channel constant coefficient.
- // Index [3] - Blue channel r^2 coefficient.
-};
-
-
-//-----------------------------------------------------------------------------------
-// ***** StereoEyeParams
-
-// StereoEyeParams describes RenderDevice configuration needed to render
-// the scene for one eye.
-class StereoEyeParams
-{
-public:
- StereoEye Eye;
- Viewport VP; // Viewport that we are rendering to
- const DistortionConfig* pDistortion;
-
- Matrix4f ViewAdjust; // Translation to be applied to view matrix.
- Matrix4f Projection; // Projection matrix used with this eye.
- Matrix4f OrthoProjection; // Orthographic projection used with this eye.
-
- void Init(StereoEye eye, const Viewport &vp, float vofs,
- const Matrix4f& proj, const Matrix4f& orthoProj,
- const DistortionConfig* distortion = 0)
- {
- Eye = eye;
- VP = vp;
- ViewAdjust = Matrix4f::Translation(Vector3f(vofs,0,0));
- Projection = proj;
- OrthoProjection = orthoProj;
- pDistortion = distortion;
- }
-};
-
-
-//-----------------------------------------------------------------------------------
-// ***** StereoConfig
-
-// StereoConfig maintains a scene stereo state and allow switching between different
-// stereo rendering modes. To support rendering, StereoConfig keeps track of HMD
-// variables such as screen size, eye-to-screen distance and distortion, and computes
-// extra data such as FOV and distortion center offsets based on it. Rendering
-// parameters are returned though StereoEyeParams for each eye.
-//
-// Beyond regular 3D projection, this class supports rendering a 2D orthographic
-// surface for UI and text. The 2D surface will be defined as fitting within a 2D
-// field of view (85 degrees by default) and used [-1,1] coordinate system with
-// square pixels. The (0,0) coordinate corresponds to eye center location
-// that is properly adjusted during rendering through SterepRenderParams::Adjust2D.
-// Genreally speaking, text outside [-1,1] coordinate range will not be readable.
-
-class StereoConfig
-{
-public:
-
- StereoConfig(StereoMode mode = Stereo_LeftRight_Multipass,
- const Viewport& fullViewport = Viewport(0,0, 1280,800));
-
-
- // *** Modifiable State Access
-
- // Sets a stereo rendering mode and updates internal cached
- // state (matrices, per-eye view) based on it.
- void SetStereoMode(StereoMode mode) { Mode = mode; DirtyFlag = true; }
- StereoMode GetStereoMode() const { return Mode; }
-
- // Sets HMD parameters; also initializes distortion coefficients.
- void SetHMDInfo(const HMDInfo& hmd);
- const HMDInfo& GetHMDInfo() const { return HMD; }
-
- // Query physical eye-to-screen distance in meters, which combines screen-to-lens and
- // and lens-to-eye pupil distances. Modifying this value adjusts FOV.
- float GetEyeToScreenDistance() const { return HMD.EyeToScreenDistance; }
- void SetEyeToScreenDistance(float esd) { HMD.EyeToScreenDistance = esd; DirtyFlag = true; }
-
- // Interpupillary distance used for stereo, in meters. Default is 0.064m (64 mm).
- void SetIPD(float ipd) { InterpupillaryDistance = ipd; DirtyFlag = true; }
- float GetIPD() const { return InterpupillaryDistance; }
-
- // Set full render target viewport; for HMD this includes both eyes.
- void SetFullViewport(const Viewport& vp);
- const Viewport& GetFullViewport() const { return FullView; }
-
- // Aspect ratio defaults to ((w/h)*multiplier) computed per eye.
- // Aspect multiplier allows adjusting aspect ratio consistently for Stereo/NoStereo.
- void SetAspectMultiplier(float m) { AspectMultiplier = m; DirtyFlag = true; }
- float GetAspectMultiplier() const { return AspectMultiplier; }
-
-
- // For the distorted image to fill rendered viewport, input texture render target needs to be
- // scaled by DistortionScale before sampling. The scale factor is computed by fitting a point
- // on of specified radius from a distortion center, more easily specified as a coordinate.
- // SetDistortionFitPointVP sets the (x,y) coordinate of the point that scale will be "fit" to,
- // assuming [-1,1] coordinate range for full left-eye viewport. A fit point is a location
- // where source (pre-distortion) and target (post-distortion) image match each other.
- // For the right eye, the interpretation of 'u' will be inverted.
- void SetDistortionFitPointVP(float x, float y);
- // SetDistortionFitPointPixels sets the (x,y) coordinate of the point that scale will be "fit" to,
- // specified in pixeld for full left-eye texture.
- void SetDistortionFitPointPixels(float x, float y);
-
- // Changes all distortion settings.
- // Note that setting HMDInfo also changes Distortion coefficients.
- void SetDistortionConfig(const DistortionConfig& d) { Distortion = d; DirtyFlag = true; }
-
- // Modify distortion coefficients; useful for adjustment tweaking.
- void SetDistortionK(int i, float k) { Distortion.K[i] = k; DirtyFlag = true; }
- float GetDistortionK(int i) const { return Distortion.K[i]; }
-
- // Sets the fieldOfView that the 2D coordinate area stretches to.
- void Set2DAreaFov(float fovRadians);
-
-
- // *** Computed State
-
- // Return current aspect ratio.
- float GetAspect() { updateIfDirty(); return Aspect; }
-
- // Return computed vertical FOV in radians/degrees.
- float GetYFOVRadians() { updateIfDirty(); return YFov; }
- float GetYFOVDegrees() { return RadToDegree(GetYFOVRadians()); }
-
- // Query horizontal projection center offset as a distance away from the
- // one-eye [-1,1] unit viewport.
- // Positive return value should be used for left eye, negative for right eye.
- float GetProjectionCenterOffset() { updateIfDirty(); return ProjectionCenterOffset; }
-
- // GetDistortionConfig isn't const because XCenterOffset bay need to be recomputed.
- const DistortionConfig& GetDistortionConfig() { updateIfDirty(); return Distortion; }
-
- // Returns DistortionScale factor by which input texture size is increased to make
- // post-distortion result distortion fit the viewport.
- float GetDistortionScale() { updateIfDirty(); return Distortion.Scale; }
-
- // Returns the size of a pixel within 2D coordinate system.
- float Get2DUnitPixel() { updateIfDirty(); return (2.0f / (FovPixels * Distortion.Scale)); }
-
- // Returns full set of Stereo rendering parameters for the specified eye.
- const StereoEyeParams& GetEyeRenderParams(StereoEye eye);
-
-private:
-
- void updateIfDirty() { if (DirtyFlag) updateComputedState(); }
- void updateComputedState();
-
- void updateDistortionOffsetAndScale();
- void updateProjectionOffset();
- void update2D();
- void updateEyeParams();
-
-
- // *** Modifiable State
-
- StereoMode Mode;
- float InterpupillaryDistance;
- float AspectMultiplier; // Multiplied into aspect ratio to change it.
- HMDInfo HMD;
- DistortionConfig Distortion;
- float DistortionFitX, DistortionFitY; // In [-1,1] half-screen viewport units.
- Viewport FullView; // Entire window viewport.
-
- float Area2DFov; // FOV range mapping to [-1, 1] 2D area.
-
- // *** Computed State
-
- bool DirtyFlag; // Set when any if the modifiable state changed.
- float YFov; // Vertical FOV.
- float Aspect; // Aspect ratio: (w/h)*AspectMultiplier.
- float ProjectionCenterOffset;
- StereoEyeParams EyeRenderParams[2];
-
-
- // ** 2D Rendering
-
- // Number of 2D pixels in the FOV. This defines [-1,1] coordinate range for 2D.
- float FovPixels;
- Matrix4f OrthoCenter;
- float OrthoPixelOffset;
-};
-
-
-}}} // OVR::Util::Render
-
-#endif
+/************************************************************************************
+
+PublicHeader: OVR.h
+Filename : Util_Render_Stereo.h
+Content : Sample stereo rendering configuration classes.
+Created : October 22, 2012
+Authors : Michael Antonov
+
+Copyright : Copyright 2012 Oculus, Inc. All Rights reserved.
+
+Use of this software is subject to the terms of the Oculus Inc license
+agreement provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+*************************************************************************************/
+
+#ifndef OVR_Util_Render_Stereo_h
+#define OVR_Util_Render_Stereo_h
+
+#include "../OVR_Device.h"
+
+namespace OVR { namespace Util { namespace Render {
+
+
+//-----------------------------------------------------------------------------------
+// ***** Stereo Enumerations
+
+// StereoMode describes rendering modes that can be used by StereoConfig.
+// These modes control whether stereo rendering is used or not (Stereo_None),
+// and how it is implemented.
+enum StereoMode
+{
+ Stereo_None = 0,
+ Stereo_LeftRight_Multipass = 1
+};
+
+
+// StereoEye specifies which eye we are rendering for; it is used to
+// retrieve StereoEyeParams.
+enum StereoEye
+{
+ StereoEye_Center,
+ StereoEye_Left,
+ StereoEye_Right
+};
+
+
+//-----------------------------------------------------------------------------------
+// ***** Viewport
+
+// Viewport describes a rectangular area used for rendering, in pixels.
+struct Viewport
+{
+ int x, y;
+ int w, h;
+
+ Viewport() {}
+ Viewport(int x1, int y1, int w1, int h1) : x(x1), y(y1), w(w1), h(h1) { }
+
+ bool operator == (const Viewport& vp) const
+ { return (x == vp.x) && (y == vp.y) && (w == vp.w) && (h == vp.h); }
+ bool operator != (const Viewport& vp) const
+ { return !operator == (vp); }
+};
+
+
+//-----------------------------------------------------------------------------------
+// ***** DistortionConfig
+
+// DistortionConfig Provides controls for the distortion shader.
+// - K[0] - K[3] are coefficients for the distortion function.
+// - XCenterOffset is the offset of lens distortion center from the
+// center of one-eye screen half. [-1, 1] Range.
+//  - Scale is a factor describing how much larger the input image will be,
+//    with a factor of 1.0f meaning no scaling. The inverse of this
+// value is applied to sampled UV coordinates (1/Scale).
+// - ChromaticAberration is an array of parameters for controlling
+// additional Red and Blue scaling in order to reduce chromatic aberration
+// caused by the Rift lenses.
+class DistortionConfig
+{
+public:
+ DistortionConfig(float k0 = 1.0f, float k1 = 0.0f, float k2 = 0.0f, float k3 = 0.0f)
+ : XCenterOffset(0), YCenterOffset(0), Scale(1.0f)
+ {
+ SetCoefficients(k0, k1, k2, k3);
+ SetChromaticAberration();
+ }
+
+ void SetCoefficients(float k0, float k1 = 0.0f, float k2 = 0.0f, float k3 = 0.0f)
+ { K[0] = k0; K[1] = k1; K[2] = k2; K[3] = k3; }
+
+ void SetChromaticAberration(float red1 = 1.0f, float red2 = 0.0f, float blue1 = 1.0f, float blue2 = 0.0f)
+ { ChromaticAberration[0] = red1; ChromaticAberration[1] = red2; ChromaticAberration[2] = blue1; ChromaticAberration[3] = blue2; }
+
+
+ // DistortionFn applies distortion equation to the argument. The returned
+ // value should match distortion equation used in shader.
+ float DistortionFn(float r) const
+ {
+ float rsq = r * r;
+ float scale = r * (K[0] + K[1] * rsq + K[2] * rsq * rsq + K[3] * rsq * rsq * rsq);
+ return scale;
+ }
+
+ // DistortionFnInverse computes the inverse of the distortion function on an argument.
+ float DistortionFnInverse(float r);
+
+ float K[4];
+ float XCenterOffset, YCenterOffset;
+ float Scale;
+
+ float ChromaticAberration[4]; // Additional per-channel scaling is applied after distortion:
+ // Index [0] - Red channel constant coefficient.
+ // Index [1] - Red channel r^2 coefficient.
+ // Index [2] - Blue channel constant coefficient.
+ // Index [3] - Blue channel r^2 coefficient.
+};
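
For a sense of scale (illustrative), with the default Rift-style coefficients installed by
StereoConfig (K = {1.0, 0.22, 0.24, 0}):
DistortionFn(0.5) = 0.5 * (1.0 + 0.22*0.25 + 0.24*0.0625) ~ 0.535, while DistortionFn(1.0) = 1.46,
i.e. radii near the edge are stretched far more than radii near the center, which is what the
DistortionScale fit compensates for.
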
+
+
+//-----------------------------------------------------------------------------------
+// ***** StereoEyeParams
+
+// StereoEyeParams describes RenderDevice configuration needed to render
+// the scene for one eye.
+class StereoEyeParams
+{
+public:
+ StereoEye Eye;
+ Viewport VP; // Viewport that we are rendering to
+ const DistortionConfig* pDistortion;
+
+ Matrix4f ViewAdjust; // Translation to be applied to view matrix.
+ Matrix4f Projection; // Projection matrix used with this eye.
+ Matrix4f OrthoProjection; // Orthographic projection used with this eye.
+
+ void Init(StereoEye eye, const Viewport &vp, float vofs,
+ const Matrix4f& proj, const Matrix4f& orthoProj,
+ const DistortionConfig* distortion = 0)
+ {
+ Eye = eye;
+ VP = vp;
+ ViewAdjust = Matrix4f::Translation(Vector3f(vofs,0,0));
+ Projection = proj;
+ OrthoProjection = orthoProj;
+ pDistortion = distortion;
+ }
+};
+
+
+//-----------------------------------------------------------------------------------
+// ***** StereoConfig
+
+// StereoConfig maintains a scene stereo state and allows switching between different
+// stereo rendering modes. To support rendering, StereoConfig keeps track of HMD
+// variables such as screen size, eye-to-screen distance and distortion, and computes
+// extra data such as FOV and distortion center offsets based on it. Rendering
+// parameters are returned through StereoEyeParams for each eye.
+//
+// Beyond regular 3D projection, this class supports rendering a 2D orthographic
+// surface for UI and text. The 2D surface will be defined as fitting within a 2D
+// field of view (85 degrees by default) and uses a [-1,1] coordinate system with
+// square pixels. The (0,0) coordinate corresponds to the eye center location,
+// which is properly adjusted during rendering through StereoRenderParams::Adjust2D.
+// Generally speaking, text outside the [-1,1] coordinate range will not be readable.
+
+class StereoConfig
+{
+public:
+
+ StereoConfig(StereoMode mode = Stereo_LeftRight_Multipass,
+ const Viewport& fullViewport = Viewport(0,0, 1280,800));
+
+
+ // *** Modifiable State Access
+
+ // Sets a stereo rendering mode and updates internal cached
+ // state (matrices, per-eye view) based on it.
+ void SetStereoMode(StereoMode mode) { Mode = mode; DirtyFlag = true; }
+ StereoMode GetStereoMode() const { return Mode; }
+
+ // Sets HMD parameters; also initializes distortion coefficients.
+ void SetHMDInfo(const HMDInfo& hmd);
+ const HMDInfo& GetHMDInfo() const { return HMD; }
+
+ // Query physical eye-to-screen distance in meters, which combines screen-to-lens and
+    // lens-to-eye pupil distances. Modifying this value adjusts FOV.
+ float GetEyeToScreenDistance() const { return HMD.EyeToScreenDistance; }
+ void SetEyeToScreenDistance(float esd) { HMD.EyeToScreenDistance = esd; DirtyFlag = true; }
+
+ // Interpupillary distance used for stereo, in meters. Default is 0.064m (64 mm).
+ void SetIPD(float ipd) { InterpupillaryDistance = ipd; IPDOverride = DirtyFlag = true; }
+ float GetIPD() const { return InterpupillaryDistance; }
+
+ // Set full render target viewport; for HMD this includes both eyes.
+ void SetFullViewport(const Viewport& vp);
+ const Viewport& GetFullViewport() const { return FullView; }
+
+ // Aspect ratio defaults to ((w/h)*multiplier) computed per eye.
+ // Aspect multiplier allows adjusting aspect ratio consistently for Stereo/NoStereo.
+ void SetAspectMultiplier(float m) { AspectMultiplier = m; DirtyFlag = true; }
+ float GetAspectMultiplier() const { return AspectMultiplier; }
+
+
+    // For the distorted image to fill the rendered viewport, the input texture render target needs
+    // to be scaled by DistortionScale before sampling. The scale factor is computed by fitting a
+    // point at a specified radius from the distortion center, more easily specified as a coordinate.
+ // SetDistortionFitPointVP sets the (x,y) coordinate of the point that scale will be "fit" to,
+ // assuming [-1,1] coordinate range for full left-eye viewport. A fit point is a location
+ // where source (pre-distortion) and target (post-distortion) image match each other.
+ // For the right eye, the interpretation of 'u' will be inverted.
+ void SetDistortionFitPointVP(float x, float y);
+ // SetDistortionFitPointPixels sets the (x,y) coordinate of the point that scale will be "fit" to,
+    // specified in pixels for the full left-eye texture.
+ void SetDistortionFitPointPixels(float x, float y);
+
+ // Changes all distortion settings.
+ // Note that setting HMDInfo also changes Distortion coefficients.
+ void SetDistortionConfig(const DistortionConfig& d) { Distortion = d; DirtyFlag = true; }
+
+ // Modify distortion coefficients; useful for adjustment tweaking.
+ void SetDistortionK(int i, float k) { Distortion.K[i] = k; DirtyFlag = true; }
+ float GetDistortionK(int i) const { return Distortion.K[i]; }
+
+ // Sets the fieldOfView that the 2D coordinate area stretches to.
+ void Set2DAreaFov(float fovRadians);
+
+
+ // *** Computed State
+
+ // Return current aspect ratio.
+ float GetAspect() { updateIfDirty(); return Aspect; }
+
+ // Return computed vertical FOV in radians/degrees.
+ float GetYFOVRadians() { updateIfDirty(); return YFov; }
+ float GetYFOVDegrees() { return RadToDegree(GetYFOVRadians()); }
+
+ // Query horizontal projection center offset as a distance away from the
+ // one-eye [-1,1] unit viewport.
+ // Positive return value should be used for left eye, negative for right eye.
+ float GetProjectionCenterOffset() { updateIfDirty(); return ProjectionCenterOffset; }
+
+    // GetDistortionConfig isn't const because XCenterOffset may need to be recomputed.
+ const DistortionConfig& GetDistortionConfig() { updateIfDirty(); return Distortion; }
+
+ // Returns DistortionScale factor by which input texture size is increased to make
+ // post-distortion result distortion fit the viewport.
+ float GetDistortionScale() { updateIfDirty(); return Distortion.Scale; }
+
+ // Returns the size of a pixel within 2D coordinate system.
+ float Get2DUnitPixel() { updateIfDirty(); return (2.0f / (FovPixels * Distortion.Scale)); }
+
+ // Returns full set of Stereo rendering parameters for the specified eye.
+ const StereoEyeParams& GetEyeRenderParams(StereoEye eye);
+
+private:
+
+ void updateIfDirty() { if (DirtyFlag) updateComputedState(); }
+ void updateComputedState();
+
+ void updateDistortionOffsetAndScale();
+ void updateProjectionOffset();
+ void update2D();
+ void updateEyeParams();
+
+
+ // *** Modifiable State
+
+ StereoMode Mode;
+ float InterpupillaryDistance;
+ float AspectMultiplier; // Multiplied into aspect ratio to change it.
+ HMDInfo HMD;
+ DistortionConfig Distortion;
+ float DistortionFitX, DistortionFitY; // In [-1,1] half-screen viewport units.
+ Viewport FullView; // Entire window viewport.
+
+ float Area2DFov; // FOV range mapping to [-1, 1] 2D area.
+
+ // *** Computed State
+
+    bool               DirtyFlag;      // Set when any of the modifiable state changes.
+ bool IPDOverride; // True after SetIPD was called.
+ float YFov; // Vertical FOV.
+ float Aspect; // Aspect ratio: (w/h)*AspectMultiplier.
+ float ProjectionCenterOffset;
+ StereoEyeParams EyeRenderParams[2];
+
+
+ // ** 2D Rendering
+
+ // Number of 2D pixels in the FOV. This defines [-1,1] coordinate range for 2D.
+ float FovPixels;
+ Matrix4f OrthoCenter;
+ float OrthoPixelOffset;
+};
+
+
+}}} // OVR::Util::Render
+
+#endif
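
One behavioural note on the change introduced in this revision (the IPDOverride flag): SetHMDInfo
now adopts the headset's reported interpupillary distance unless the application has explicitly set
one via SetIPD, which sets IPDOverride. A short sketch (illustrative; `stereo` and `hmdInfo` as in
the setup sketch further above):

    stereo.SetHMDInfo(hmdInfo);   // InterpupillaryDistance follows hmdInfo
    stereo.SetIPD(0.0605f);       // user-supplied IPD; sets IPDOverride = true
    stereo.SetHMDInfo(hmdInfo);   // refreshing HMD data now leaves the IPD untouched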