Diffstat (limited to 'LibOVR/Src/OVR_SensorFusion.cpp')
-rw-r--r--  LibOVR/Src/OVR_SensorFusion.cpp  1156
1 file changed, 686 insertions(+), 470 deletions(-)
diff --git a/LibOVR/Src/OVR_SensorFusion.cpp b/LibOVR/Src/OVR_SensorFusion.cpp
index eac0366..6cfe00c 100644
--- a/LibOVR/Src/OVR_SensorFusion.cpp
+++ b/LibOVR/Src/OVR_SensorFusion.cpp
@@ -3,18 +3,18 @@
Filename : OVR_SensorFusion.cpp
Content : Methods that determine head orientation from sensor data over time
Created : October 9, 2012
-Authors : Michael Antonov, Steve LaValle, Max Katsev
+Authors : Michael Antonov, Steve LaValle, Dov Katz, Max Katsev
-Copyright : Copyright 2013 Oculus VR, Inc. All Rights reserved.
+Copyright : Copyright 2014 Oculus VR, Inc. All Rights reserved.
-Licensed under the Oculus VR SDK License Version 2.0 (the "License");
-you may not use the Oculus VR SDK except in compliance with the License,
+Licensed under the Oculus VR Rift SDK License Version 3.1 (the "License");
+you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
-http://www.oculusvr.com/licenses/LICENSE-2.0
+http://www.oculusvr.com/licenses/LICENSE-3.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
@@ -29,8 +29,15 @@ limitations under the License.
#include "Kernel/OVR_System.h"
#include "OVR_JSON.h"
#include "OVR_Profile.h"
+#include "OVR_Stereo.h"
+#include "Recording/Recorder.h"
+
+// Temporary for debugging
+bool Global_Flag_1 = true;
+
+// Convenient global variable to temporarily extract this data.
+float TPH_CameraPoseOrientationWxyz[4];
-#define MAX_DEVICE_PROFILE_MAJOR_VERSION 1
namespace OVR {
@@ -38,579 +45,788 @@ namespace OVR {
// ***** Sensor Fusion
SensorFusion::SensorFusion(SensorDevice* sensor)
- : Stage(0), RunningTime(0), DeltaT(0.001f),
- Handler(getThis()), pDelegate(0),
- Gain(0.05f), EnableGravity(true),
- EnablePrediction(true), PredictionDT(0.03f), PredictionTimeIncrement(0.001f),
- FRawMag(10), FAngV(20),
- GyroOffset(), TiltAngleFilter(1000),
- EnableYawCorrection(false), MagCalibrated(false), MagNumReferences(0), MagRefIdx(-1), MagRefScore(0),
- MotionTrackingEnabled(true)
+ : MotionTrackingEnabled(true), VisionPositionEnabled(true),
+ EnableGravity(true), EnableYawCorrection(true), MagCalibrated(true), EnableCameraTiltCorrection(true),
+ FAngV(20), FAccelHeadset(1000), FAccelCamera(1000),
+ ExposureRecordHistory(100), LastMessageExposureFrame(NULL),
+ VisionMaxIMUTrackTime(4.0/60.0), // Integrate IMU up to 4 frames
+ HeadModel(0, OVR_DEFAULT_NECK_TO_EYE_VERTICAL, -OVR_DEFAULT_NECK_TO_EYE_HORIZONTAL),
+ DefaultCameraPosition(0, 0, -1)
{
+ pHandler = new BodyFrameHandler(this);
+
+ // And the clock is running...
+ LogText("*** SensorFusion Startup: TimeSeconds = %f\n", Timer::GetSeconds());
+
if (sensor)
AttachToSensor(sensor);
- MagCalibrationMatrix.SetIdentity();
+
+ // MA: 1/25/2014 for DK2
+ SetCenterPupilDepth(0.076f);
+
+ Reset();
}
SensorFusion::~SensorFusion()
-{
+{
+ delete(pHandler);
}
-
bool SensorFusion::AttachToSensor(SensorDevice* sensor)
{
- // clear the cached device information
- CachedSensorInfo.SerialNumber[0] = 0;
- CachedSensorInfo.VendorId = 0;
- CachedSensorInfo.ProductId = 0;
-
if (sensor != NULL)
{
- // Cache the sensor device so we can access this information during
- // mag saving and loading (avoid holding a reference to sensor to prevent
- // deadlock on shutdown)
- sensor->GetDeviceInfo(&CachedSensorInfo); // save the device information
- MessageHandler* pCurrentHandler = sensor->GetMessageHandler();
+ // Load IMU position
+ Array<PositionCalibrationReport> reports;
- if (pCurrentHandler == &Handler)
+ bool result = sensor->GetAllPositionCalibrationReports(&reports);
+ if(result)
{
- Reset();
- return true;
- }
+ PositionCalibrationReport const& imu = reports[reports.GetSize() - 1];
+ OVR_ASSERT(imu.PositionType == PositionCalibrationReport::PositionType_IMU);
+ IMUPosition = imu.Position;
+
+ Recorder::Buffer(imu);
+ Recorder::Buffer(reports);
- if (pCurrentHandler != NULL)
+ // convert from vision to the world frame
+ IMUPosition.x *= -1.0;
+ IMUPosition.z *= -1.0;
+ }
+ else
{
- OVR_DEBUG_LOG(
- ("SensorFusion::AttachToSensor failed - sensor %p already has handler", sensor));
- return false;
+ // TODO: set up IMUPosition for devices that don't have this report.
}
-
- // Automatically load the default mag calibration for this sensor
- LoadMagCalibration();
+ // Repopulate CPFOrigin
+ SetCenterPupilDepth(CenterPupilDepth);
}
- if (Handler.IsHandlerInstalled())
- {
- Handler.RemoveHandlerFromDevices();
- }
+ pHandler->RemoveHandlerFromDevices();
if (sensor != NULL)
{
- sensor->SetMessageHandler(&Handler);
+ sensor->AddMessageHandler(pHandler);
}
Reset();
+
+ // Initialize the sensor state
+ // TBD: This is a hack to avoid a race condition if sensor status is checked immediately
+ // after sensor creation but before any data has flowed through. We should probably
+    //      not depend strictly on data flow to determine capabilities like orientation and position
+ // tracking, or else use some sort of synchronous method to wait for data
+ LocklessState init;
+ init.StatusFlags = Status_OrientationTracked;
+ UpdatedState.SetState(init);
+
return true;
}
-
- // Resets the current orientation
+// Resets the current orientation
void SensorFusion::Reset()
{
- Lock::Locker lockScope(Handler.GetHandlerLock());
- Q = Quatf();
- QUncorrected = Quatf();
- Stage = 0;
- RunningTime = 0;
- MagNumReferences = 0;
- MagRefIdx = -1;
- GyroOffset = Vector3f();
+ Lock::Locker lockScope(pHandler->GetHandlerLock());
+
+ UpdatedState.SetState(LocklessState());
+ State = PoseState<double>();
+ State.Transform.Position = -CPFPositionInIMUFrame; // place CPF at the origin, not the IMU
+ VisionState = PoseState<double>();
+ VisionError = PoseState<double>();
+ CurrentExposureIMUDelta = PoseState<double>();
+ CameraPose = Pose<double>(Quatd(), DefaultCameraPosition);
+ CameraPoseConfidence = -1;
+
+ ExposureRecordHistory.Clear();
+ LastMessageExposureFrame = MessageExposureFrame(NULL);
+ LastVisionAbsoluteTime = 0;
+ FullVisionCorrectionExposureCounter = 0;
+ Stage = 0;
+
+ MagNumReferences = 0;
+ MagRefIdx = -1;
+ MagRefScore = 0;
+ MagCorrectionIntegralTerm = Quatd();
+ AccelOffset = Vector3d();
+
+ FAccelCamera.Clear();
+ FAccelHeadset.Clear();
+ FAngV.Clear();
+
+ setNeckPivotFromPose ( State.Transform );
}
-// Compute a rotation required to transform "estimated" into "measured"
-// Returns an approximation of the goal rotation in the Simultaneous Orthogonal Rotations Angle representation
-// (vector direction is the axis of rotation, norm is the angle)
-Vector3f SensorFusion_ComputeCorrection(Vector3f measured, Vector3f estimated)
+//-------------------------------------------------------------------------------------
+// Vision & message processing
+
+void SensorFusion::OnVisionFailure()
{
- measured.Normalize();
- estimated.Normalize();
- Vector3f correction = measured.Cross(estimated);
- float cosError = measured.Dot(estimated);
- // from the def. of cross product, correction.Length() = sin(error)
- // therefore sin(error) * sqrt(2 / (1 + cos(error))) = 2 * sin(error / 2) ~= error in [-pi, pi]
- // Mathf::Tolerance is used to avoid div by 0 if cos(error) = -1
- return correction * sqrt(2 / (1 + cosError + Mathf::Tolerance));
+ // do nothing
}
-void SensorFusion::handleMessage(const MessageBodyFrame& msg)
+void SensorFusion::OnVisionPreviousFrame(const Pose<double>& pose)
{
- if (msg.Type != Message_BodyFrame || !IsMotionTrackingEnabled())
- return;
+ // simply save the observation for use in the next OnVisionSuccess call;
+ // this should not have unintended side-effects for position filtering,
+ // since the vision time is not updated and the system keeps thinking we don't have vision yet
+ VisionState.Transform = pose;
+}
- // Put the sensor readings into convenient local variables
- Vector3f gyro = msg.RotationRate;
- Vector3f accel = msg.Acceleration;
- Vector3f mag = msg.MagneticField;
+void SensorFusion::OnVisionSuccess(const Pose<double>& pose, UInt32 exposureCounter)
+{
+ Lock::Locker lockScope(pHandler->GetHandlerLock());
- // Insert current sensor data into filter history
- FRawMag.AddElement(mag);
- FAngV.AddElement(gyro);
+ LastVisionAbsoluteTime = GetTime();
- // Apply the calibration parameters to raw mag
- Vector3f calMag = MagCalibrated ? GetCalibratedMagValue(FRawMag.Mean()) : FRawMag.Mean();
+ // ********* LastVisionExposureRecord *********
- // Set variables accessible through the class API
- DeltaT = msg.TimeDelta;
- AngV = gyro;
- A = accel;
- RawMag = mag;
- CalMag = calMag;
+ // Skip old data and use the record that matches the exposure counter
+ while (!ExposureRecordHistory.IsEmpty() &&
+ (ExposureRecordHistory.PeekFront().ExposureCounter <= exposureCounter))
+ {
+ LastVisionExposureRecord = ExposureRecordHistory.PopFront();
+ }
- // Keep track of time
- Stage++;
- RunningTime += DeltaT;
+ // Use current values if we don't have historical data
+    // Right now, this will happen if we get the first frame after a prediction failure,
+    // and this exposure wasn't in the buffer. (TBD: Unlikely... unless an IMU message wasn't sent?)
+ if (LastVisionExposureRecord.ExposureCounter != exposureCounter)
+ LastVisionExposureRecord = ExposureRecord(exposureCounter, GetTime(), State, PoseState<double>());
- // Small preprocessing
- Quatf Qinv = Q.Inverted();
- Vector3f up = Qinv.Rotate(Vector3f(0, 1, 0));
+ // ********* VisionState *********
+
+    // This is stored in the camera frame, so we need to be careful when combining it with the IMU data,
+ // which is in the world frame
- Vector3f gyroCorrected = gyro;
+ // convert to the world frame
+ Vector3d positionChangeW = CameraPose.Orientation.Rotate(pose.Position - VisionState.Transform.Position);
- // Apply integral term
- // All the corrections are stored in the Simultaneous Orthogonal Rotations Angle representation,
- // which allows to combine and scale them by just addition and multiplication
- if (EnableGravity || EnableYawCorrection)
- gyroCorrected -= GyroOffset;
+ VisionState.TimeInSeconds = LastVisionExposureRecord.ExposureTime;
+ VisionState.Transform = pose;
- if (EnableGravity)
+    // Check LastVisionExposureRecord.Delta.TimeInSeconds to avoid a divide by zero, which we could
+    // (rarely) get if we didn't have exposure deltas in the history (skipped exposure counters
+    // due to a video mode change that stalls USB, etc).
+ if (LastVisionExposureRecord.Delta.TimeInSeconds > 0.001)
+ {
+ // Use the accel data to estimate the velocity at the exposure time
+ // (as opposed to the average velocity between exposures)
+ Vector3d velocityW = LastVisionExposureRecord.Delta.LinearVelocity +
+ (positionChangeW - LastVisionExposureRecord.Delta.Transform.Position) /
+ LastVisionExposureRecord.Delta.TimeInSeconds;
+ VisionState.LinearVelocity = CameraPose.Orientation.Inverted().Rotate(velocityW);
+ }
+ else
{
- const float spikeThreshold = 0.01f;
- const float gravityThreshold = 0.1f;
- float proportionalGain = 5 * Gain; // Gain parameter should be removed in a future release
- float integralGain = 0.0125f;
+ VisionState.LinearVelocity = Vector3d(0,0,0);
+ }
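+    // Editorial note (a reading of the math above, not part of this change):
+    // vision alone gives the *average* velocity over the exposure interval,
+    // positionChangeW / dt. Subtracting the IMU-integrated position delta and
+    // adding the IMU-integrated velocity delta shifts that average to an estimate
+    // of the instantaneous velocity at the exposure time.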
- Vector3f tiltCorrection = SensorFusion_ComputeCorrection(accel, up);
+ // ********* VisionError *********
- if (Stage > 5)
- {
- // Spike detection
- float tiltAngle = up.Angle(accel);
- TiltAngleFilter.AddElement(tiltAngle);
- if (tiltAngle > TiltAngleFilter.Mean() + spikeThreshold)
- proportionalGain = integralGain = 0;
- // Acceleration detection
- const float gravity = 9.8f;
- if (fabs(accel.Length() / gravity - 1) > gravityThreshold)
- integralGain = 0;
- }
- else // Apply full correction at the startup
- {
- proportionalGain = 1 / DeltaT;
- integralGain = 0;
- }
+ // This is in the world frame, so transform the vision data appropriately
- gyroCorrected += (tiltCorrection * proportionalGain);
- GyroOffset -= (tiltCorrection * integralGain * DeltaT);
- }
+ VisionError.Transform.Position = CameraPose.Orientation.Rotate(VisionState.Transform.Position) + CameraPose.Position -
+ LastVisionExposureRecord.State.Transform.Position;
+ VisionError.LinearVelocity = CameraPose.Orientation.Rotate(VisionState.LinearVelocity) -
+ LastVisionExposureRecord.State.LinearVelocity;
+ VisionError.Transform.Orientation = CameraPose.Orientation * VisionState.Transform.Orientation *
+ LastVisionExposureRecord.State.Transform.Orientation.Inverted();
+}
- if (EnableYawCorrection && MagCalibrated && RunningTime > 2.0f)
+Pose<double> SensorFusion::GetVisionPrediction(UInt32 exposureCounter)
+{
+ Lock::Locker lockScope(pHandler->GetHandlerLock());
+
+ // Combine the small deltas together
+ // Should only be one iteration, unless we are skipping camera frames
+ ExposureRecord record;
+ PoseState<double> delta = PoseState<double>();
+ while (!ExposureRecordHistory.IsEmpty() &&
+ (ExposureRecordHistory.PeekFront().ExposureCounter <= exposureCounter))
{
- const float maxMagRefDist = 0.1f;
- const float maxTiltError = 0.05f;
- float proportionalGain = 0.01f;
- float integralGain = 0.0005f;
+ record = ExposureRecordHistory.PopFront();
+ delta.AdvanceByDelta(record.Delta);
+ }
+    // Put the combined exposure record back in the history, for use in OnVisionSuccess(...)
+ record.Delta = delta;
+ ExposureRecordHistory.PushFront(record);
- // Update the reference point if needed
- if (MagRefIdx < 0 || calMag.Distance(MagRefsInBodyFrame[MagRefIdx]) > maxMagRefDist)
- {
- // Delete a bad point
- if (MagRefIdx >= 0 && MagRefScore < 0)
- {
- MagNumReferences--;
- MagRefsInBodyFrame[MagRefIdx] = MagRefsInBodyFrame[MagNumReferences];
- MagRefsInWorldFrame[MagRefIdx] = MagRefsInWorldFrame[MagNumReferences];
- }
- // Find a new one
- MagRefIdx = -1;
- MagRefScore = 1000;
- float bestDist = maxMagRefDist;
- for (int i = 0; i < MagNumReferences; i++)
- {
- float dist = calMag.Distance(MagRefsInBodyFrame[i]);
- if (bestDist > dist)
- {
- bestDist = dist;
- MagRefIdx = i;
- }
- }
- // Create one if needed
- if (MagRefIdx < 0 && MagNumReferences < MagMaxReferences)
- {
- MagRefIdx = MagNumReferences;
- MagRefsInBodyFrame[MagRefIdx] = calMag;
- MagRefsInWorldFrame[MagRefIdx] = Q.Rotate(calMag).Normalized();
- MagNumReferences++;
- }
- }
+ // Add the effect of initial pose and velocity from vision.
+ // Don't forget to transform IMU to the camera frame
+ Pose<double> c(VisionState.Transform.Orientation * delta.Transform.Orientation,
+ VisionState.Transform.Position + VisionState.LinearVelocity * delta.TimeInSeconds +
+ CameraPose.Orientation.Inverted().Rotate(delta.Transform.Position));
- if (MagRefIdx >= 0)
- {
- Vector3f magEstimated = Qinv.Rotate(MagRefsInWorldFrame[MagRefIdx]);
- Vector3f magMeasured = calMag.Normalized();
+ return c;
+}
- // Correction is computed in the horizontal plane (in the world frame)
- Vector3f yawCorrection = SensorFusion_ComputeCorrection(magMeasured.ProjectToPlane(up),
- magEstimated.ProjectToPlane(up));
+void SensorFusion::handleMessage(const MessageBodyFrame& msg)
+{
+ if (msg.Type != Message_BodyFrame || !IsMotionTrackingEnabled())
+ return;
- if (fabs(up.Dot(magEstimated - magMeasured)) < maxTiltError)
- {
- MagRefScore += 2;
- }
- else // If the vertical angle is wrong, decrease the score
- {
- MagRefScore -= 1;
- proportionalGain = integralGain = 0;
- }
- gyroCorrected += (yawCorrection * proportionalGain);
- GyroOffset -= (yawCorrection * integralGain * DeltaT);
- }
- }
+ // Put the sensor readings into convenient local variables
+ Vector3d gyro(msg.RotationRate);
+ Vector3d accel(msg.Acceleration);
+ Vector3d mag(msg.MagneticField);
+ double DeltaT = msg.TimeDelta;
+ MagCalibrated = msg.MagCalibrated;
+
+ // Keep track of time
+ State.TimeInSeconds = msg.AbsoluteTimeSeconds;
+    // We got an update within the last ~67ms (VisionMaxIMUTrackTime) and the data is not very old
+ bool visionIsRecent = (GetTime() - LastVisionAbsoluteTime < VisionMaxIMUTrackTime) && (GetVisionLatency() < 0.25);
+ Stage++;
+
+ // Insert current sensor data into filter history
+ FAngV.PushBack(gyro);
+ FAccelHeadset.Update(accel, DeltaT, Quatd(gyro, gyro.Length() * DeltaT));
- // Update the orientation quaternion based on the corrected angular velocity vector
- float angle = gyroCorrected.Length() * DeltaT;
- if (angle > 0.0f)
- Q = Q * Quatf(gyroCorrected, angle);
+ // Process raw inputs
+ // in the future the gravity offset can be calibrated using vision feedback
+ Vector3d accelW = State.Transform.Orientation.Rotate(accel) - Vector3d(0, 9.8, 0);
+
+ // Update headset orientation
+ State.StoreAndIntegrateGyro(gyro, DeltaT);
+ // Tilt correction based on accelerometer
+ if (EnableGravity)
+ applyTiltCorrection(DeltaT);
+ // Yaw correction based on camera
+ if (EnableYawCorrection && visionIsRecent)
+ applyVisionYawCorrection(DeltaT);
+ // Yaw correction based on magnetometer
+ if (EnableYawCorrection && MagCalibrated) // MagCalibrated is always false for DK2 for now
+ applyMagYawCorrection(mag, DeltaT);
+
+ // Update camera orientation
+ if (EnableCameraTiltCorrection && visionIsRecent)
+ applyCameraTiltCorrection(accel, DeltaT);
// The quaternion magnitude may slowly drift due to numerical error,
// so it is periodically normalized.
- if (Stage % 500 == 0)
- Q.Normalize();
+ if ((Stage & 0xFF) == 0)
+ {
+ State.Transform.Orientation.Normalize();
+ CameraPose.Orientation.Normalize();
+ }
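+    // Editorial note: (Stage & 0xFF) == 0 renormalizes every 256 samples with a
+    // cheap mask instead of the previous Stage % 500 division; both simply bound
+    // the accumulated numerical drift in the unit quaternions.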
+
+ // Update headset position
+ if (VisionPositionEnabled && visionIsRecent)
+ {
+        // Integrate IMU and velocity here up to a fixed amount of time after vision.
+ State.StoreAndIntegrateAccelerometer(accelW + AccelOffset, DeltaT);
+ // Position correction based on camera
+ applyPositionCorrection(DeltaT);
+ // Compute where the neck pivot would be.
+ setNeckPivotFromPose(State.Transform);
+ }
+ else
+ {
+ // Fall back onto internal head model
+ // Use the last-known neck pivot position to figure out the expected IMU position.
+ // (should be the opposite of SensorFusion::setNeckPivotFromPose)
+ Vector3d imuInNeckPivotFrame = HeadModel - CPFPositionInIMUFrame;
+ State.Transform.Position = NeckPivotPosition + State.Transform.Orientation.Rotate(imuInNeckPivotFrame);
+
+ // We can't trust velocity past this point.
+ State.LinearVelocity = Vector3d(0,0,0);
+ State.LinearAcceleration = accelW;
+ }
+
+ // Compute the angular acceleration
+ State.AngularAcceleration = (FAngV.GetSize() >= 12 && DeltaT > 0) ?
+ (FAngV.SavitzkyGolayDerivative12() / DeltaT) : Vector3d();
+
+ // Update the dead reckoning state used for incremental vision tracking
+ CurrentExposureIMUDelta.StoreAndIntegrateGyro(gyro, DeltaT);
+ CurrentExposureIMUDelta.StoreAndIntegrateAccelerometer(accelW, DeltaT);
+
+    // If we only compiled the stub version of Recorder, then branch prediction shouldn't
+    // have any problem with this if statement. Actually, it should be optimized out, but we need to verify.
+ if(Recorder::GetRecorder())
+ {
+ Posed savePose = static_cast<Posed>(GetPoseAtTime(GetTime()));
+ Recorder::LogData("sfTimeSeconds", State.TimeInSeconds);
+ Recorder::LogData("sfPose", savePose);
+ }
+
+ // Store the lockless state.
+ LocklessState lstate;
+ lstate.StatusFlags = Status_OrientationTracked;
+ if (VisionPositionEnabled)
+ lstate.StatusFlags |= Status_PositionConnected;
+ if (VisionPositionEnabled && visionIsRecent)
+ lstate.StatusFlags |= Status_PositionTracked;
+ lstate.State = State;
+ lstate.Temperature = msg.Temperature;
+ lstate.Magnetometer = mag;
+ UpdatedState.SetState(lstate);
}
-// A predictive filter based on extrapolating the smoothed, current angular velocity
-Quatf SensorFusion::GetPredictedOrientation(float pdt)
-{
- Lock::Locker lockScope(Handler.GetHandlerLock());
- Quatf qP = Q;
-
- if (EnablePrediction)
+void SensorFusion::handleExposure(const MessageExposureFrame& msg)
+{
+ if (msg.CameraFrameCount > LastMessageExposureFrame.CameraFrameCount + 1)
{
- // This method assumes a constant angular velocity
- Vector3f angVelF = FAngV.SavitzkyGolaySmooth8();
- float angVelFL = angVelF.Length();
-
- // Force back to raw measurement
- angVelF = AngV;
- angVelFL = AngV.Length();
-
- // Dynamic prediction interval: Based on angular velocity to reduce vibration
- const float minPdt = 0.001f;
- const float slopePdt = 0.1f;
- float newpdt = pdt;
- float tpdt = minPdt + slopePdt * angVelFL;
- if (tpdt < pdt)
- newpdt = tpdt;
- //LogText("PredictonDTs: %d\n",(int)(newpdt / PredictionTimeIncrement + 0.5f));
-
- if (angVelFL > 0.001f)
+ LogText("Skipped %d tracker exposure counters\n",
+ msg.CameraFrameCount - (LastMessageExposureFrame.CameraFrameCount + 1));
+ }
+ else
+ {
+        // MA: Check timing deltas
+        // It seems repetitive tracking loss occurs when timing gets out of sync
+        // Could be caused by some bug in HW timing + time filter?
+ if (fabs(State.TimeInSeconds - msg.CameraTimeSeconds) > 0.1f)
{
- Vector3f rotAxisP = angVelF / angVelFL;
- float halfRotAngleP = angVelFL * newpdt * 0.5f;
- float sinaHRAP = sin(halfRotAngleP);
- Quatf deltaQP(rotAxisP.x*sinaHRAP, rotAxisP.y*sinaHRAP,
- rotAxisP.z*sinaHRAP, cos(halfRotAngleP));
- qP = Q * deltaQP;
+ static int logLimiter = 0;
+ if ((logLimiter & 0x3F) == 0)
+ {
+ LogText("Timing out of sync: State.T=%f, ExposureT=%f, delta=%f, Time()=%f\n",
+ State.TimeInSeconds, msg.CameraTimeSeconds,
+ State.TimeInSeconds - msg.CameraTimeSeconds, GetTime());
+ }
+ logLimiter++;
}
+
}
- return qP;
-}
+ CurrentExposureIMUDelta.TimeInSeconds = msg.CameraTimeSeconds - LastMessageExposureFrame.CameraTimeSeconds;
+ ExposureRecordHistory.PushBack(ExposureRecord(
+ msg.CameraFrameCount, msg.CameraTimeSeconds, State, CurrentExposureIMUDelta));
-Vector3f SensorFusion::GetCalibratedMagValue(const Vector3f& rawMag) const
+ // Every new exposure starts from zero
+ CurrentExposureIMUDelta = PoseState<double>();
+ LastMessageExposureFrame = msg;
+}
+
+// If you have a known-good pose, this sets the neck pivot position.
+void SensorFusion::setNeckPivotFromPose(Posed const &pose)
{
- OVR_ASSERT(HasMagCalibration());
- return MagCalibrationMatrix.Transform(rawMag);
- }
+ Vector3d imuInNeckPivotFrame = HeadModel - CPFPositionInIMUFrame;
+ NeckPivotPosition = pose.Position - pose.Orientation.Rotate(imuInNeckPivotFrame);
+}
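+// Editorial note: this is the inverse of the head-model fallback in handleMessage():
+// there, IMU position = NeckPivotPosition + R * (HeadModel - CPFPositionInIMUFrame);
+// here the pivot is recovered from a trusted pose by solving that relation for
+// NeckPivotPosition.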
-SensorFusion::BodyFrameHandler::~BodyFrameHandler()
+// These two functions need to be moved into Quat class
+// Compute a rotation required to transform "from" into "to".
+Quatd vectorAlignmentRotation(const Vector3d &from, const Vector3d &to)
{
- RemoveHandlerFromDevices();
+ Vector3d axis = from.Cross(to);
+ if (axis.LengthSq() == 0)
+ // this handles both collinear and zero-length input cases
+ return Quatd();
+ double angle = from.Angle(to);
+ return Quatd(axis, angle);
}
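+// Editorial sketch (illustration only, not part of this change): aligning +Z to +Y
+// yields a 90-degree rotation about -X:
+//     Quatd q = vectorAlignmentRotation(Vector3d(0,0,1), Vector3d(0,1,0));
+//     q.Rotate(Vector3d(0,0,1));  // ~= (0,1,0)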
-void SensorFusion::BodyFrameHandler::OnMessage(const Message& msg)
+// Compute the part of the quaternion that rotates around Y axis
+Quatd extractYawRotation(const Quatd &error)
{
- if (msg.Type == Message_BodyFrame)
- pFusion->handleMessage(static_cast<const MessageBodyFrame&>(msg));
- if (pFusion->pDelegate)
- pFusion->pDelegate->OnMessage(msg);
+ if (error.y == 0)
+ return Quatd();
+ double phi = atan2(error.w, error.y);
+ double alpha = Mathd::Pi - 2 * phi;
+ return Quatd(Axis_Y, alpha);
+}
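+// Editorial derivation (assumes error.w > 0, i.e. a small error rotation):
+// for q = (w, x, y, z), the twist of q about the Y axis has angle 2*atan2(y, w),
+// and Pi - 2*atan2(w, y) is algebraically equal to 2*atan2(y, w) in that case.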
+
+void SensorFusion::applyPositionCorrection(double deltaT)
+{
+ // Each component of gainPos is equivalent to a Kalman gain of (sigma_process / sigma_observation)
+ const Vector3d gainPos = Vector3d(10, 10, 8);
+ const Vector3d gainVel = gainPos.EntrywiseMultiply(gainPos) * 0.5;
+ const double snapThreshold = 0.1; // Large value (previously 0.01, which caused frequent jumping)
+
+ if (LastVisionExposureRecord.ExposureCounter <= FullVisionCorrectionExposureCounter)
+ return;
+
+ if (VisionError.Transform.Position.LengthSq() > (snapThreshold * snapThreshold) ||
+ !(UpdatedState.GetState().StatusFlags & Status_PositionTracked))
+ {
+ // high error or just reacquired position from vision - apply full correction
+ State.Transform.Position += VisionError.Transform.Position;
+ State.LinearVelocity += VisionError.LinearVelocity;
+ // record the frame counter to avoid additional correction until we see the new data
+ FullVisionCorrectionExposureCounter = LastMessageExposureFrame.CameraFrameCount;
+ }
+ else
+ {
+ State.Transform.Position += VisionError.Transform.Position.EntrywiseMultiply(gainPos) * deltaT;
+ State.LinearVelocity += VisionError.Transform.Position.EntrywiseMultiply(gainVel) * deltaT;
+        // Uncomment the line below to try accelerometer bias estimation in the filter
+ //AccelOffset += VisionError.Pose.Position * gainAccel * deltaT;
+ }
}
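+// Editorial note (a rough linear analysis of the non-snap branch above, ignoring
+// vision latency): the position/velocity update behaves per axis like a second-order
+// observer with characteristic equation s^2 + gainPos*s + gainVel = 0, so the choice
+// gainVel = gainPos^2 / 2 gives a damping ratio gainPos/(2*sqrt(gainVel)) ~= 0.71.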
-bool SensorFusion::BodyFrameHandler::SupportsMessageType(MessageType type) const
+void SensorFusion::applyVisionYawCorrection(double deltaT)
{
- return (type == Message_BodyFrame);
+ const double gain = 0.25;
+ const double snapThreshold = 0.1;
+
+ if (LastVisionExposureRecord.ExposureCounter <= FullVisionCorrectionExposureCounter)
+ return;
+
+ Quatd yawError = extractYawRotation(VisionError.Transform.Orientation);
+
+ Quatd correction;
+ if (Alg::Abs(yawError.w) < cos(snapThreshold / 2)) // angle(yawError) > snapThreshold
+ {
+ // high error, jump to the vision position
+ correction = yawError;
+ // record the frame counter to avoid additional correction until we see the new data
+ FullVisionCorrectionExposureCounter = LastMessageExposureFrame.CameraFrameCount;
+ }
+ else
+ correction = yawError.Nlerp(Quatd(), gain * deltaT);
+
+ State.Transform.Orientation = correction * State.Transform.Orientation;
}
-// Writes the current calibration for a particular device to a device profile file
-// sensor - the sensor that was calibrated
-// cal_name - an optional name for the calibration or default if cal_name == NULL
-bool SensorFusion::SaveMagCalibration(const char* calibrationName) const
+void SensorFusion::applyMagYawCorrection(Vector3d mag, double deltaT)
{
- if (CachedSensorInfo.SerialNumber[0] == 0 || !HasMagCalibration())
- return false;
-
- // A named calibration may be specified for calibration in different
- // environments, otherwise the default calibration is used
- if (calibrationName == NULL)
- calibrationName = "default";
-
- // Generate a mag calibration event
- JSON* calibration = JSON::CreateObject();
- // (hardcoded for now) the measurement and representation method
- calibration->AddStringItem("Version", "2.0");
- calibration->AddStringItem("Name", "default");
-
- // time stamp the calibration
- char time_str[64];
-
-#ifdef OVR_OS_WIN32
- struct tm caltime;
- localtime_s(&caltime, &MagCalibrationTime);
- strftime(time_str, 64, "%Y-%m-%d %H:%M:%S", &caltime);
-#else
- struct tm* caltime;
- caltime = localtime(&MagCalibrationTime);
- strftime(time_str, 64, "%Y-%m-%d %H:%M:%S", caltime);
-#endif
-
- calibration->AddStringItem("Time", time_str);
-
- // write the full calibration matrix
- char matrix[256];
- Matrix4f calmat = GetMagCalibration();
- calmat.ToString(matrix, 256);
- calibration->AddStringItem("CalibrationMatrix", matrix);
- // save just the offset, for backwards compatibility
- // this can be removed when we don't want to support 0.2.4 anymore
- Vector3f center(calmat.M[0][3], calmat.M[1][3], calmat.M[2][3]);
- Matrix4f tmp = calmat; tmp.M[0][3] = tmp.M[1][3] = tmp.M[2][3] = 0; tmp.M[3][3] = 1;
- center = tmp.Inverted().Transform(center);
- Matrix4f oldcalmat; oldcalmat.M[0][3] = center.x; oldcalmat.M[1][3] = center.y; oldcalmat.M[2][3] = center.z;
- oldcalmat.ToString(matrix, 256);
- calibration->AddStringItem("Calibration", matrix);
-
+ const double minMagLengthSq = Mathd::Tolerance; // need to use a real value to discard very weak fields
+ const double maxMagRefDist = 0.1;
+ const double maxTiltError = 0.05;
+ const double proportionalGain = 0.01;
+ const double integralGain = 0.0005;
+
+ Vector3d magW = State.Transform.Orientation.Rotate(mag);
+ // verify that the horizontal component is sufficient
+ if (magW.x * magW.x + magW.z * magW.z < minMagLengthSq)
+ return;
+ magW.Normalize();
- String path = GetBaseOVRPath(true);
- path += "/Devices.json";
-
- // Look for a prexisting device file to edit
- Ptr<JSON> root = *JSON::Load(path);
- if (root)
- { // Quick sanity check of the file type and format before we parse it
- JSON* version = root->GetFirstItem();
- if (version && version->Name == "Oculus Device Profile Version")
- {
- int major = atoi(version->Value.ToCStr());
- if (major > MAX_DEVICE_PROFILE_MAJOR_VERSION)
+ // Update the reference point if needed
+ if (MagRefScore < 0 || MagRefIdx < 0 ||
+ mag.Distance(MagRefsInBodyFrame[MagRefIdx]) > maxMagRefDist)
+ {
+ // Delete a bad point
+ if (MagRefIdx >= 0 && MagRefScore < 0)
+ {
+ MagNumReferences--;
+ MagRefsInBodyFrame[MagRefIdx] = MagRefsInBodyFrame[MagNumReferences];
+ MagRefsInWorldFrame[MagRefIdx] = MagRefsInWorldFrame[MagNumReferences];
+            MagRefsPoses[MagRefIdx] = MagRefsPoses[MagNumReferences];
+ }
+ // Find a new one
+ MagRefIdx = -1;
+ MagRefScore = 1000;
+ double bestDist = maxMagRefDist;
+ for (int i = 0; i < MagNumReferences; i++)
+ {
+ double dist = mag.Distance(MagRefsInBodyFrame[i]);
+ if (bestDist > dist)
{
- // don't use the file on unsupported major version number
- root->Release();
- root = NULL;
+ bestDist = dist;
+ MagRefIdx = i;
}
}
- else
+ // Create one if needed
+ if (MagRefIdx < 0 && MagNumReferences < MagMaxReferences)
{
- root->Release();
- root = NULL;
+ MagRefIdx = MagNumReferences;
+ MagRefsInBodyFrame[MagRefIdx] = mag;
+ MagRefsInWorldFrame[MagRefIdx] = magW;
+ MagRefsPoses[MagRefIdx] = State.Transform.Orientation;
+ MagNumReferences++;
}
}
- JSON* device = NULL;
- if (root)
+ if (MagRefIdx >= 0)
{
- device = root->GetFirstItem(); // skip the header
- device = root->GetNextItem(device);
- while (device)
- { // Search for a previous calibration with the same name for this device
- // and remove it before adding the new one
- if (device->Name == "Device")
- {
- JSON* item = device->GetItemByName("Serial");
- if (item && item->Value == CachedSensorInfo.SerialNumber)
- { // found an entry for this device
- item = device->GetNextItem(item);
- while (item)
- {
- if (item->Name == "MagCalibration")
- {
- JSON* name = item->GetItemByName("Name");
- if (name && name->Value == calibrationName)
- { // found a calibration of the same name
- item->RemoveNode();
- item->Release();
- break;
- }
- }
- item = device->GetNextItem(item);
- }
-
- // update the auto-mag flag
- item = device->GetItemByName("EnableYawCorrection");
- if (item)
- item->dValue = (double)EnableYawCorrection;
- else
- device->AddBoolItem("EnableYawCorrection", EnableYawCorrection);
-
- break;
- }
- }
+ Vector3d magRefW = MagRefsInWorldFrame[MagRefIdx];
- device = root->GetNextItem(device);
+ // If the vertical angle is wrong, decrease the score and do nothing
+ if (Alg::Abs(magRefW.y - magW.y) > maxTiltError)
+ {
+ MagRefScore -= 1;
+ return;
}
+
+ MagRefScore += 2;
+#if 0
+ // this doesn't seem to work properly, need to investigate
+ Quatd error = vectorAlignmentRotation(magW, magRefW);
+ Quatd yawError = extractYawRotation(error);
+#else
+ // Correction is computed in the horizontal plane
+ magW.y = magRefW.y = 0;
+ Quatd yawError = vectorAlignmentRotation(magW, magRefW);
+#endif
+ Quatd correction = yawError.Nlerp(Quatd(), proportionalGain * deltaT) *
+ MagCorrectionIntegralTerm.Nlerp(Quatd(), deltaT);
+ MagCorrectionIntegralTerm = MagCorrectionIntegralTerm * yawError.Nlerp(Quatd(), integralGain * deltaT);
+
+ State.Transform.Orientation = correction * State.Transform.Orientation;
}
+}
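+// Editorial note (assuming OVR's Nlerp(other, a) blends a*this + (1-a)*other):
+// for small angles, q.Nlerp(Quatd(), k*deltaT) is approximately a rotation of
+// angle(q)*k*deltaT about q's axis, so the two Nlerp terms above act as the
+// proportional and integral terms of a PI controller on the yaw error; the same
+// pattern drives applyTiltCorrection() below.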
+
+void SensorFusion::applyTiltCorrection(double deltaT)
+{
+ const double gain = 0.25;
+ const double snapThreshold = 0.1;
+ const Vector3d up(0, 1, 0);
+
+ Vector3d accelW = State.Transform.Orientation.Rotate(FAccelHeadset.GetFilteredValue());
+ Quatd error = vectorAlignmentRotation(accelW, up);
+
+ Quatd correction;
+ if (FAccelHeadset.GetSize() == 1 ||
+ ((Alg::Abs(error.w) < cos(snapThreshold / 2) && FAccelHeadset.Confidence() > 0.75)))
+ // full correction for start-up
+ // or large error with high confidence
+ correction = error;
+ else if (FAccelHeadset.Confidence() > 0.5)
+ correction = error.Nlerp(Quatd(), gain * deltaT);
else
- { // Create a new device root
- root = *JSON::CreateObject();
- root->AddStringItem("Oculus Device Profile Version", "1.0");
- }
+ // accelerometer is unreliable due to movement
+ return;
- if (device == NULL)
- {
- device = JSON::CreateObject();
- device->AddStringItem("Product", CachedSensorInfo.ProductName);
- device->AddNumberItem("ProductID", CachedSensorInfo.ProductId);
- device->AddStringItem("Serial", CachedSensorInfo.SerialNumber);
- device->AddBoolItem("EnableYawCorrection", EnableYawCorrection);
+ State.Transform.Orientation = correction * State.Transform.Orientation;
+}
+
+void SensorFusion::applyCameraTiltCorrection(Vector3d accel, double deltaT)
+{
+ const double snapThreshold = 0.02; // in radians
+ const double maxCameraPositionOffset = 0.2;
+ const Vector3d up(0, 1, 0), forward(0, 0, -1);
+
+ if (LastVisionExposureRecord.ExposureCounter <= FullVisionCorrectionExposureCounter)
+ return;
+
+ // for startup use filtered value instead of instantaneous for stability
+ if (FAccelCamera.IsEmpty())
+ accel = FAccelHeadset.GetFilteredValue();
- root->AddItem("Device", device);
+ Quatd headsetToCamera = CameraPose.Orientation.Inverted() * VisionError.Transform.Orientation * State.Transform.Orientation;
+ // this is what the hypothetical camera-mounted accelerometer would show
+ Vector3d accelCamera = headsetToCamera.Rotate(accel);
+ FAccelCamera.Update(accelCamera, deltaT);
+ Vector3d accelCameraW = CameraPose.Orientation.Rotate(FAccelCamera.GetFilteredValue());
+
+ Quatd error1 = vectorAlignmentRotation(accelCameraW, up);
+ // cancel out yaw rotation
+ Vector3d forwardCamera = (error1 * CameraPose.Orientation).Rotate(forward);
+ forwardCamera.y = 0;
+ Quatd error2 = vectorAlignmentRotation(forwardCamera, forward);
+ // combined error
+ Quatd error = error2 * error1;
+
+ double confidence = FAccelCamera.Confidence();
+ // penalize the confidence if looking away from the camera
+ // TODO: smooth fall-off
+ if (VisionState.Transform.Orientation.Rotate(forward).Angle(forward) > 1)
+ confidence *= 0.5;
+
+ Quatd correction;
+ if (FAccelCamera.GetSize() == 1 ||
+ confidence > CameraPoseConfidence + 0.2 ||
+ // disabled due to false positives when moving side to side
+// (Alg::Abs(error.w) < cos(5 * snapThreshold / 2) && confidence > 0.55) ||
+ (Alg::Abs(error.w) < cos(snapThreshold / 2) && confidence > 0.75))
+ {
+ // large error with high confidence
+ correction = error;
+ // update the confidence level
+ CameraPoseConfidence = confidence;
+ // record the frame counter to avoid additional correction until we see the new data
+ FullVisionCorrectionExposureCounter = LastMessageExposureFrame.CameraFrameCount;
+
+ LogText("adjust camera tilt confidence %f angle %f\n",
+ CameraPoseConfidence, RadToDegree(correction.Angle(Quatd())));
}
+ else
+ {
+ // accelerometer is unreliable due to movement
+ return;
+ }
+
+ Quatd newOrientation = correction * CameraPose.Orientation;
+ // compute a camera position change that together with the camera rotation would result in zero player movement
+ Vector3d newPosition = CameraPose.Orientation.Rotate(VisionState.Transform.Position) + CameraPose.Position -
+ newOrientation.Rotate(VisionState.Transform.Position);
+ // if the new position is too far, reset to default
+ // (can't hide the rotation, might as well use it to reset the position)
+ if (newPosition.DistanceSq(DefaultCameraPosition) > maxCameraPositionOffset * maxCameraPositionOffset)
+ newPosition = DefaultCameraPosition;
- // Create and the add the new calibration event to the device
- device->AddItem("MagCalibration", calibration);
+ CameraPose.Orientation = newOrientation;
+ CameraPose.Position = newPosition;
- return root->Save(path);
+    // Convenient global variable to temporarily extract this data.
+ TPH_CameraPoseOrientationWxyz[0] = (float) newOrientation.w;
+ TPH_CameraPoseOrientationWxyz[1] = (float) newOrientation.x;
+ TPH_CameraPoseOrientationWxyz[2] = (float) newOrientation.y;
+ TPH_CameraPoseOrientationWxyz[3] = (float) newOrientation.z;
+
+
+ LogText("adjust camera position %f %f %f\n", newPosition.x, newPosition.y, newPosition.z);
}
-// Loads a saved calibration for the specified device from the device profile file
-// sensor - the sensor that the calibration was saved for
-// cal_name - an optional name for the calibration or the default if cal_name == NULL
-bool SensorFusion::LoadMagCalibration(const char* calibrationName)
+//-------------------------------------------------------------------------------------
+// Head model functions.
+
+// Sets up head-and-neck model and device-to-pupil dimensions from the user's profile.
+void SensorFusion::SetUserHeadDimensions(Profile const *profile, HmdRenderInfo const &hmdRenderInfo)
{
- if (CachedSensorInfo.SerialNumber[0] == 0)
- return false;
-
- // A named calibration may be specified for calibration in different
- // environments, otherwise the default calibration is used
- if (calibrationName == NULL)
- calibrationName = "default";
-
- String path = GetBaseOVRPath(true);
- path += "/Devices.json";
-
- // Load the device profiles
- Ptr<JSON> root = *JSON::Load(path);
- if (root == NULL)
- return false;
-
- // Quick sanity check of the file type and format before we parse it
- JSON* version = root->GetFirstItem();
- if (version && version->Name == "Oculus Device Profile Version")
- {
- int major = atoi(version->Value.ToCStr());
- if (major > MAX_DEVICE_PROFILE_MAJOR_VERSION)
- return false; // don't parse the file on unsupported major version number
+ float neckEyeHori = OVR_DEFAULT_NECK_TO_EYE_HORIZONTAL;
+ float neckEyeVert = OVR_DEFAULT_NECK_TO_EYE_VERTICAL;
+ if ( profile != NULL )
+ {
+ float neckeye[2];
+ if (profile->GetFloatValues(OVR_KEY_NECK_TO_EYE_DISTANCE, neckeye, 2) == 2)
+ {
+ neckEyeHori = neckeye[0];
+ neckEyeVert = neckeye[1];
+ }
}
- else
+ // Make sure these are vaguely sensible values.
+ OVR_ASSERT ( ( neckEyeHori > 0.05f ) && ( neckEyeHori < 0.5f ) );
+ OVR_ASSERT ( ( neckEyeVert > 0.05f ) && ( neckEyeVert < 0.5f ) );
+ SetHeadModel ( Vector3f ( 0.0, neckEyeVert, -neckEyeHori ) );
+
+ // Find the distance from the center of the screen to the "center eye"
+ // This center eye is used by systems like rendering & audio to represent the player,
+ // and they will handle the offsets needed from there to each actual eye.
+
+ // HACK HACK HACK
+ // We know for DK1 the screen->lens surface distance is roughly 0.049f, and that the faceplate->lens is 0.02357f.
+ // We're going to assume(!!!!) that all HMDs have the same screen->faceplate distance.
+ // Crystal Cove was measured to be roughly 0.025 screen->faceplate which agrees with this assumption.
+ // TODO: do this properly! Update: Measured this at 0.02733 with a CC prototype, CES era (PT7), on 2/19/14 -Steve
+ float screenCenterToMidplate = 0.02733f;
+
+ float centerEyeRelief = hmdRenderInfo.GetEyeCenter().ReliefInMeters;
+ if ( profile == NULL )
{
- return false;
+ // No valid profile, so the eye-relief won't be correct either, so fill in a default that feels good
+ centerEyeRelief = 0.020f;
}
+ float centerPupilDepth = screenCenterToMidplate + hmdRenderInfo.LensSurfaceToMidplateInMeters + centerEyeRelief;
+ SetCenterPupilDepth ( centerPupilDepth );
+}
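+// Editorial example (using only the numbers quoted above; other HMDs will differ):
+// with the DK1-era values, centerPupilDepth ~= 0.02733 (screen->midplate)
+// + 0.02357 (lens->midplate) + 0.020 (fallback eye relief) ~= 0.071m, close to
+// the 0.076f DK2 default set in the constructor.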
- bool autoEnableCorrection = false;
-
- JSON* device = root->GetNextItem(version);
- while (device)
- { // Search for a previous calibration with the same name for this device
- // and remove it before adding the new one
- if (device->Name == "Device")
- {
- JSON* item = device->GetItemByName("Serial");
- if (item && item->Value == CachedSensorInfo.SerialNumber)
- { // found an entry for this device
-
- JSON* autoyaw = device->GetItemByName("EnableYawCorrection");
- if (autoyaw)
- autoEnableCorrection = (autoyaw->dValue != 0);
-
- int maxCalibrationVersion = 0;
- item = device->GetNextItem(item);
- while (item)
- {
- if (item->Name == "MagCalibration")
- {
- JSON* calibration = item;
- JSON* name = calibration->GetItemByName("Name");
- if (name && name->Value == calibrationName)
- { // found a calibration with this name
-
- int major = 0;
- JSON* version = calibration->GetItemByName("Version");
- if (version)
- major = atoi(version->Value.ToCStr());
-
- if (major > maxCalibrationVersion && major <= 2)
- {
- time_t now;
- time(&now);
-
- // parse the calibration time
- time_t calibration_time = now;
- JSON* caltime = calibration->GetItemByName("Time");
- if (caltime)
- {
- const char* caltime_str = caltime->Value.ToCStr();
-
- tm ct;
- memset(&ct, 0, sizeof(tm));
-
-#ifdef OVR_OS_WIN32
- struct tm nowtime;
- localtime_s(&nowtime, &now);
- ct.tm_isdst = nowtime.tm_isdst;
- sscanf_s(caltime_str, "%d-%d-%d %d:%d:%d",
- &ct.tm_year, &ct.tm_mon, &ct.tm_mday,
- &ct.tm_hour, &ct.tm_min, &ct.tm_sec);
-#else
- struct tm* nowtime = localtime(&now);
- ct.tm_isdst = nowtime->tm_isdst;
- sscanf(caltime_str, "%d-%d-%d %d:%d:%d",
- &ct.tm_year, &ct.tm_mon, &ct.tm_mday,
- &ct.tm_hour, &ct.tm_min, &ct.tm_sec);
-#endif
- ct.tm_year -= 1900;
- ct.tm_mon--;
- calibration_time = mktime(&ct);
- }
-
- // parse the calibration matrix
- JSON* cal = calibration->GetItemByName("CalibrationMatrix");
- if (cal == NULL)
- cal = calibration->GetItemByName("Calibration");
-
- if (cal)
- {
- Matrix4f calmat = Matrix4f::FromString(cal->Value.ToCStr());
- SetMagCalibration(calmat);
- MagCalibrationTime = calibration_time;
- EnableYawCorrection = autoEnableCorrection;
-
- maxCalibrationVersion = major;
- }
- }
- }
- }
- item = device->GetNextItem(item);
- }
-
- return (maxCalibrationVersion > 0);
- }
- }
+Vector3f SensorFusion::GetHeadModel() const
+{
+ return (Vector3f)HeadModel;
+}
- device = root->GetNextItem(device);
+void SensorFusion::SetHeadModel(const Vector3f &headModel, bool resetNeckPivot /*= true*/ )
+{
+ Lock::Locker lockScope(pHandler->GetHandlerLock());
+ // The head model should look something like (0, 0.12, -0.12), so
+ // these asserts are to try to prevent sign problems, as
+ // they can be subtle but nauseating!
+ OVR_ASSERT ( headModel.y > 0.0f );
+ OVR_ASSERT ( headModel.z < 0.0f );
+ HeadModel = (Vector3d)headModel;
+ if ( resetNeckPivot )
+ {
+ setNeckPivotFromPose ( State.Transform );
}
+}
+
+float SensorFusion::GetCenterPupilDepth() const
+{
+ return CenterPupilDepth;
+}
+
+
+void SensorFusion::SetCenterPupilDepth(float centerPupilDepth)
+{
+ CenterPupilDepth = centerPupilDepth;
+
+ CPFPositionInIMUFrame = -IMUPosition;
+ CPFPositionInIMUFrame.z += CenterPupilDepth;
+
+ setNeckPivotFromPose ( State.Transform );
+}
+
+//-------------------------------------------------------------------------------------
+
+// This is a "perceptually tuned predictive filter", which means that it is optimized
+// for improvements in the VR experience, rather than pure error. In particular,
+// jitter is more perceptible at lower speeds whereas latency is more perceptible
+// after a high-speed motion. Therefore, the prediction interval is dynamically
+// adjusted based on speed. Significantly more research is needed to further improve
+// this family of filters.
+static Pose<double> calcPredictedPose(const PoseState<double>& poseState, double predictionDt)
+{
+ Pose<double> pose = poseState.Transform;
+ const double linearCoef = 1.0;
+ Vector3d angularVelocity = poseState.AngularVelocity;
+ double angularSpeed = angularVelocity.Length();
+
+ // This could be tuned so that linear and angular are combined with different coefficients
+ double speed = angularSpeed + linearCoef * poseState.LinearVelocity.Length();
+
+ const double slope = 0.2; // The rate at which the dynamic prediction interval varies
+ double candidateDt = slope * speed; // TODO: Replace with smoothstep function
+
+ double dynamicDt = predictionDt;
+
+ // Choose the candidate if it is shorter, to improve stability
+ if (candidateDt < predictionDt)
+ dynamicDt = candidateDt;
+
+ if (angularSpeed > 0.001)
+ pose.Orientation = pose.Orientation * Quatd(angularVelocity, angularSpeed * dynamicDt);
+
+ pose.Position += poseState.LinearVelocity * dynamicDt;
+
+ return pose;
+}
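+// Editorial example: with predictionDt = 0.03 and slope = 0.2, a slow motion of
+// speed 0.05 gives candidateDt = 0.01 < 0.03, so the interval shrinks to 10ms
+// (less jitter at low speed); at speed 1.0, candidateDt = 0.2 > 0.03, so the full
+// 30ms is kept (less latency after fast motion).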
+
+
+Posef SensorFusion::GetPoseAtTime(double absoluteTime) const
+{
+ SensorState ss = GetSensorStateAtTime ( absoluteTime );
+ return ss.Predicted.Transform;
+}
+
+
+SensorState SensorFusion::GetSensorStateAtTime(double absoluteTime) const
+{
+ const LocklessState lstate = UpdatedState.GetState();
+ // Delta time from the last processed message
+ const double pdt = absoluteTime - lstate.State.TimeInSeconds;
+
+ SensorState ss;
+ ss.Recorded = PoseStatef(lstate.State);
+ ss.Temperature = lstate.Temperature;
+ ss.Magnetometer = Vector3f(lstate.Magnetometer);
+ ss.StatusFlags = lstate.StatusFlags;
+
+ // Do prediction logic
+ ss.Predicted = ss.Recorded;
+ ss.Predicted.TimeInSeconds = absoluteTime;
+ ss.Predicted.Transform = Posef(calcPredictedPose(lstate.State, pdt));
- return false;
+ // CPFOriginInIMUFrame transformation
+ const Vector3f cpfOriginInIMUFrame(CPFPositionInIMUFrame);
+ ss.Recorded.Transform.Position += ss.Recorded.Transform.Orientation.Rotate(cpfOriginInIMUFrame);
+ ss.Predicted.Transform.Position += ss.Predicted.Transform.Orientation.Rotate(cpfOriginInIMUFrame);
+ return ss;
+}
+
+unsigned SensorFusion::GetStatus() const
+{
+ return UpdatedState.GetState().StatusFlags;
}
+//-------------------------------------------------------------------------------------
+void SensorFusion::OnMessage(const MessageBodyFrame& msg)
+{
+ OVR_ASSERT(!IsAttachedToSensor());
+ handleMessage(msg);
+}
-} // namespace OVR
+//-------------------------------------------------------------------------------------
+void SensorFusion::BodyFrameHandler::OnMessage(const Message& msg)
+{
+ Recorder::Buffer(msg);
+ if (msg.Type == Message_BodyFrame)
+ pFusion->handleMessage(static_cast<const MessageBodyFrame&>(msg));
+ if (msg.Type == Message_ExposureFrame)
+ pFusion->handleExposure(static_cast<const MessageExposureFrame&>(msg));
+}
+
+} // namespace OVR