Diffstat (limited to 'LibOVR/Src/Vision/SensorFusion')
-rwxr-xr-x  LibOVR/Src/Vision/SensorFusion/Vision_SensorState.h           161
-rwxr-xr-x  LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.cpp   253
-rwxr-xr-x  LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.h     121
3 files changed, 535 insertions, 0 deletions
diff --git a/LibOVR/Src/Vision/SensorFusion/Vision_SensorState.h b/LibOVR/Src/Vision/SensorFusion/Vision_SensorState.h
new file mode 100755
index 0000000..13b0bb3
--- /dev/null
+++ b/LibOVR/Src/Vision/SensorFusion/Vision_SensorState.h
@@ -0,0 +1,161 @@
+/************************************************************************************
+
+Filename : Vision_SensorState.h
+Content : Sensor state information shared by tracking system with games
+Created : May 13, 2014
+Authors : Dov Katz, Chris Taylor
+
+Copyright : Copyright 2014 Oculus VR, Inc. All Rights reserved.
+
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
+you may not use the Oculus VR Rift SDK except in compliance with the License,
+which is provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+You may obtain a copy of the License at
+
+http://www.oculusvr.com/licenses/LICENSE-3.2
+
+Unless required by applicable law or agreed to in writing, the Oculus VR SDK
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+*************************************************************************************/
+
+#ifndef OVR_Vision_SensorState_h
+#define OVR_Vision_SensorState_h
+
+#include "Vision/Vision_Common.h"
+#include "Kernel/OVR_SharedMemory.h"
+#include "Kernel/OVR_Lockless.h"
+#include "Kernel/OVR_String.h"
+#include "Util/Util_LatencyTest2State.h"
+#include "Sensors/OVR_DeviceConstants.h"
+
+
+namespace OVR { namespace Vision {
+
+
+// Bit flags describing the current status of sensor tracking.
+// These values must be the same as ovrStatusBits
+enum StatusBits
+{
+ // Tracked bits: Toggled by SensorFusion
+ Status_OrientationTracked = 0x0001, // Orientation is currently tracked (connected and in use)
+ Status_PositionTracked = 0x0002, // Position is currently tracked (false if out of range)
+ Status_CameraPoseTracked = 0x0004, // Camera pose is currently tracked
+
+ // Connected bits: Toggled by TrackingManager
+ Status_PositionConnected = 0x0020, // Position tracking HW is connected
+ Status_BuiltinConnected = 0x0040, // Builtin tracking HW is connected
+ Status_HMDConnected = 0x0080, // HMD is available & connected
+
+ // Masks
+ Status_AllMask = 0xffff,
+ Status_TrackingMask = Status_PositionTracked | Status_OrientationTracked | Status_CameraPoseTracked,
+ Status_ConnectedMask = Status_PositionConnected | Status_HMDConnected,
+};
+
+#pragma pack(push, 8)
+
+// TrackedObject state stored in lockless updater "queue" and used for
+// prediction by SensorStateReader
+struct LocklessSensorState
+{
+ PoseState<double> WorldFromImu;
+ SensorDataType RawSensorData;
+
+ // DO NOT USE
+ // only preserved for backwards compatibility
+ Pose<double> WorldFromCamera_DEPRECATED;
+
+ uint32_t StatusFlags;
+ uint32_t _PAD_0_;
+
+ // ImuFromCpf for HMD pose tracking
+ Posed ImuFromCpf;
+
+ // Initialized to invalid state
+ LocklessSensorState() :
+ WorldFromImu()
+ , RawSensorData()
+ , WorldFromCamera_DEPRECATED()
+ , StatusFlags(0)
+ , _PAD_0_(0) // This assignment should be irrelevant, but it quells static/runtime analysis complaints.
+ , ImuFromCpf()
+ {
+ }
+};
+
+static_assert((sizeof(LocklessSensorState) == sizeof(PoseState<double>) + sizeof(SensorDataType) + sizeof(Pose<double>) + 2*sizeof(uint32_t) + sizeof(Posed)), "sizeof(LocklessSensorState) failure");
+
+struct LocklessCameraState
+{
+ Pose<double> WorldFromCamera;
+
+ uint32_t StatusFlags;
+ uint32_t _PAD_0_;
+
+ // Initialized to invalid state
+ LocklessCameraState() :
+ WorldFromCamera()
+ , StatusFlags(0)
+ , _PAD_0_(0) // This assignment should be irrelevant, but it quells static/runtime analysis complaints.
+ {
+ }
+};
+
+static_assert((sizeof(LocklessCameraState) == sizeof(Pose<double>) + 2 * sizeof(uint32_t)), "sizeof(LocklessCameraState) failure");
+
+
+// Padded out version stored in the updater slots
+// Designed to be a larger fixed size to allow the data to grow in the future
+// without breaking older compiled code.
+OVR_DISABLE_MSVC_WARNING(4351)
+template <class Payload, int PaddingSize>
+struct LocklessPadding
+{
+ uint8_t buffer[PaddingSize];
+
+ LocklessPadding() : buffer() { }
+
+ LocklessPadding& operator=(const Payload& rhs)
+ {
+ // if this fires off, then increase PaddingSize
+ // IMPORTANT: this WILL break backwards compatibility
+ static_assert(sizeof(buffer) >= sizeof(Payload), "PaddingSize is too small");
+
+ memcpy(buffer, &rhs, sizeof(Payload));
+ return *this;
+ }
+
+ operator Payload() const
+ {
+ Payload result;
+ memcpy(&result, buffer, sizeof(Payload));
+ return result;
+ }
+};
+OVR_RESTORE_MSVC_WARNING()
+
+#pragma pack(pop)
+
+//// Lockless updaters
+
+struct CombinedHmdUpdater
+{
+ // IMPORTANT: do not add more data to this struct
+ // new objects should have their own shared memory blocks
+ LocklessUpdater<LocklessSensorState, LocklessPadding<LocklessSensorState, 512> > SensorState;
+ LocklessUpdater<Util::FrameTimeRecordSet, Util::FrameTimeRecordSet> LatencyTest;
+};
+
+
+typedef LocklessUpdater<LocklessCameraState, LocklessPadding<LocklessCameraState, 512> > CameraStateUpdater;
+
+
+}} // namespace OVR::Vision
+
+#endif
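
A note on the padding scheme above: LocklessPadding round-trips its payload through a fixed-size byte buffer, so each shared-memory slot keeps a stable size and binary layout even if the payload struct later gains fields. Below is a minimal standalone sketch of the same idea; the names DemoPayload and PaddedSlot are illustrative stand-ins, not LibOVR types.

// Minimal standalone sketch of the LocklessPadding idea (DemoPayload and
// PaddedSlot are illustrative names, not LibOVR types): the payload
// round-trips through a fixed-size byte buffer, so the slot keeps a stable
// size and layout even if the payload struct later grows.
#include <cassert>
#include <cstdint>
#include <cstring>
#include <type_traits>

struct DemoPayload
{
    double position[3];
    std::uint32_t statusFlags;
};

template <class Payload, int PaddingSize>
struct PaddedSlot
{
    std::uint8_t buffer[PaddingSize] = {};

    PaddedSlot& operator=(const Payload& rhs)
    {
        // If this fires, the payload outgrew the slot; growing PaddingSize
        // breaks binary compatibility with older readers, as noted above.
        static_assert(sizeof(buffer) >= sizeof(Payload), "PaddingSize too small");
        static_assert(std::is_trivially_copyable<Payload>::value,
                      "memcpy round-trip requires a trivially copyable payload");
        std::memcpy(buffer, &rhs, sizeof(Payload));
        return *this;
    }

    operator Payload() const
    {
        Payload result;
        std::memcpy(&result, buffer, sizeof(Payload));
        return result;
    }
};

int main()
{
    PaddedSlot<DemoPayload, 512> slot;
    DemoPayload in = { { 1.0, 2.0, 3.0 }, 0x0001 };
    slot = in;              // store: payload copied into the padded buffer
    DemoPayload out = slot; // load: payload copied back out unchanged
    assert(out.statusFlags == in.statusFlags && out.position[2] == in.position[2]);
    return 0;
}

The fixed 512-byte slots used by CombinedHmdUpdater and CameraStateUpdater are what the "grow in the future without breaking older compiled code" comment relies on: each side copies only the sizeof(Payload) it was compiled against, while the slot itself never changes size.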
diff --git a/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.cpp b/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.cpp
new file mode 100755
index 0000000..b60b500
--- /dev/null
+++ b/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.cpp
@@ -0,0 +1,253 @@
+/************************************************************************************
+
+Filename : Vision_SensorStateReader.cpp
+Content : Separate reader component that is able to recover sensor pose
+Created : June 4, 2014
+Authors : Chris Taylor
+
+Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
+
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
+you may not use the Oculus VR Rift SDK except in compliance with the License,
+which is provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+You may obtain a copy of the License at
+
+http://www.oculusvr.com/licenses/LICENSE-3.2
+
+Unless required by applicable law or agreed to in writing, the Oculus VR SDK
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+*************************************************************************************/
+
+#include "Vision_SensorStateReader.h"
+
+
+namespace OVR { namespace Vision {
+
+
+//-------------------------------------------------------------------------------------
+
+// This is a "perceptually tuned predictive filter", which means that it is optimized
+// for improvements in the VR experience, rather than pure error. In particular,
+// jitter is more perceptible at lower speeds whereas latency is more perceptible
+// after a high-speed motion. Therefore, the prediction interval is dynamically
+// adjusted based on speed. Significantly more research is needed to further improve
+// this family of filters.
+static Pose<double> calcPredictedPose(const PoseState<double>& poseState, double predictionDt)
+{
+ Pose<double> pose = poseState.ThePose;
+ const double linearCoef = 1.0;
+ Vector3d angularVelocity = poseState.AngularVelocity;
+ double angularSpeed = angularVelocity.Length();
+
+ // This could be tuned so that linear and angular are combined with different coefficients
+ double speed = angularSpeed + linearCoef * poseState.LinearVelocity.Length();
+
+ const double slope = 0.2; // The rate at which the dynamic prediction interval varies
+ double candidateDt = slope * speed; // TODO: Replace with smoothstep function
+
+ double dynamicDt = predictionDt;
+
+ // Choose the candidate if it is shorter, to improve stability
+ if (candidateDt < predictionDt)
+ {
+ dynamicDt = candidateDt;
+ }
+
+ if (angularSpeed > 0.001)
+ {
+ pose.Rotation = pose.Rotation * Quatd(angularVelocity, angularSpeed * dynamicDt);
+ }
+
+ pose.Translation += poseState.LinearVelocity * dynamicDt;
+
+ return pose;
+}
+
+PoseState<float> calcPredictedPoseState(const LocklessSensorState& sensorState, double absoluteTime, const Posed& centeredFromWorld)
+{
+ // Delta time from the last available data
+ double pdt = absoluteTime - sensorState.WorldFromImu.TimeInSeconds;
+ static const double maxPdt = 0.1;
+
+ // If delta went negative due to synchronization problems between processes or just a lag spike,
+ if (pdt < 0)
+ {
+ pdt = 0;
+ }
+ else if (pdt > maxPdt)
+ {
+ pdt = maxPdt;
+ static double lastLatWarnTime = 0;
+ if (lastLatWarnTime != sensorState.WorldFromImu.TimeInSeconds)
+ {
+ lastLatWarnTime = sensorState.WorldFromImu.TimeInSeconds;
+ LogText("[TrackingStateReader] Prediction interval too high: %f s, clamping at %f s\n", pdt, maxPdt);
+ }
+ }
+
+ PoseStatef result;
+ result = PoseStatef(sensorState.WorldFromImu);
+ result.TimeInSeconds = absoluteTime;
+ result.ThePose = Posef(centeredFromWorld * calcPredictedPose(sensorState.WorldFromImu, pdt) * sensorState.ImuFromCpf);
+ return result;
+}
+
+//// TrackingStateReader
+
+// Pre-0.5.0 applications assume that the initial WorldFromCentered
+// pose is always identity, because the WorldFromImu pose has a 180-degree flip in Y
+// and a 1-meter offset in Z. See CAPI_HMDState.cpp
+Posed TrackingStateReader::DefaultWorldFromCentered(Quatd::Identity(), Vector3d(0, 0, 0));
+
+// At startup, we want an identity pose when the user is looking along the positive camera Z axis, one meter in front of the camera.
+// That is a 180 degree rotation about Y, with a -1 meter translation (the inverse of this pose, CenteredFromWorld, is actually used)
+// (NOTE: This pose should be the same as SensorFusionFilter::DefaultWorldFromImu)
+//
+//Posed TrackingStateReader::DefaultWorldFromCentered(Quatd(0, 1, 0, 0), Vector3d(0, 0, -1));
+
+TrackingStateReader::TrackingStateReader() :
+ HmdUpdater(nullptr),
+ CameraUpdater(nullptr)
+{
+ CenteredFromWorld = GetDefaultCenteredFromWorld();
+}
+
+void TrackingStateReader::SetUpdaters(const CombinedHmdUpdater *hmd, const CameraStateUpdater *camera)
+{
+ HmdUpdater = hmd;
+ CameraUpdater = camera;
+}
+
+
+// This function centers tracking on the current pose, such that when the
+// headset is positioned at the current pose and looking level in the current direction,
+// the tracking system pose will be identity.
+// In other words, tracking is relative to this centered pose.
+//
+bool TrackingStateReader::RecenterPose(const Vector3d& neckModelOffset)
+{
+ if (!HmdUpdater)
+ return false;
+
+ const LocklessSensorState lstate = HmdUpdater->SensorState.GetState();
+ Posed worldFromCpf = (lstate.WorldFromImu.ThePose * lstate.ImuFromCpf);
+
+ return ComputeCenteredFromWorld(worldFromCpf, neckModelOffset);
+}
+
+bool TrackingStateReader::ComputeCenteredFromWorld(const Posed& worldFromCpf, const Vector3d& neckModel)
+{
+ // Position of CPF in the head rotation center frame
+ const Vector3d cpfInRotationCenter = neckModel;
+
+ const Vector3d forward(0, 0, -1);
+ const Vector3d up(0, 1, 0);
+ Vector3d look = worldFromCpf.Rotate(forward);
+
+ // If the headset is pointed straight up or straight down,
+ // it may be face down on a tabletop. In this case we
+ // can't reliably extract a heading angle.
+ // We assume straight ahead and return false so caller
+ // knows that recenter may not be reliable.
+ bool headingValid = true;
+ static const double lookTol = cos(DegreeToRad(20.0));
+ if (fabs(look.Dot(up)) >= lookTol)
+ {
+ look = forward;
+ headingValid = false;
+ }
+
+ // Now compute the orientation of the headset when looking straight ahead:
+ // Extract the heading (yaw) component of the pose
+ Vector3d centeredLook = Vector3d(look.x, 0, look.z).Normalized();
+ Quatd centeredOrientation = Quatd::Align(centeredLook, forward);
+
+ // Compute the position in world space of the head rotation center:
+ // we assume the head rotates about this point in space.
+ Vector3d headRotationCenter = worldFromCpf.Transform(-cpfInRotationCenter);
+
+ // Now apply the heading rotation to compute the reference position of the CPF
+ // relative to the head rotation center.
+ Vector3d centeredCpfPos = headRotationCenter + centeredOrientation.Rotate(cpfInRotationCenter);
+
+ // Now compute the centered pose of the CPF.
+ Posed worldFromCentered(centeredOrientation, centeredCpfPos);
+
+ // For tracking, we use the inverse of the centered pose
+ CenteredFromWorld = worldFromCentered.Inverted();
+
+ return headingValid;
+}
+
+bool TrackingStateReader::GetTrackingStateAtTime(double absoluteTime, TrackingState& ss) const
+{
+ LocklessCameraState cameraState;
+ LocklessSensorState sensorState;
+
+ if (CameraUpdater)
+ cameraState = CameraUpdater->GetState();
+ if (HmdUpdater)
+ sensorState = HmdUpdater->SensorState.GetState();
+
+ // Update the status flags
+ ss.StatusFlags = cameraState.StatusFlags | sensorState.StatusFlags;
+
+ // If no hardware is connected, override the tracking flags
+ if (0 == (ss.StatusFlags & Status_HMDConnected))
+ {
+ ss.StatusFlags &= ~Status_TrackingMask;
+ }
+ if (0 == (ss.StatusFlags & Status_PositionConnected))
+ {
+ ss.StatusFlags &= ~(Status_PositionTracked | Status_CameraPoseTracked);
+ }
+
+ // If tracking info is invalid,
+ if (0 == (ss.StatusFlags & Status_TrackingMask))
+ {
+ return false;
+ }
+
+ ss.HeadPose = calcPredictedPoseState(sensorState, absoluteTime, CenteredFromWorld);
+
+ ss.CameraPose = Posef(CenteredFromWorld * cameraState.WorldFromCamera);
+ ss.LeveledCameraPose = Posef(CenteredFromWorld * Posed(Quatd(), cameraState.WorldFromCamera.Translation));
+
+ ss.RawSensorData = sensorState.RawSensorData;
+ return true;
+}
+
+bool TrackingStateReader::GetPoseAtTime(double absoluteTime, Posef& transform) const
+{
+ TrackingState ss;
+
+ if (!GetTrackingStateAtTime(absoluteTime, ss))
+ {
+ return false;
+ }
+
+ transform = ss.HeadPose.ThePose;
+
+ return true;
+}
+
+uint32_t TrackingStateReader::GetStatus() const
+{
+ TrackingState ss;
+
+ if (!GetTrackingStateAtTime(0, ss))
+ {
+ return 0;
+ }
+
+ return ss.StatusFlags;
+}
+
+
+}} // namespace OVR::Vision
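
The "perceptually tuned" core of calcPredictedPose above is compact enough to restate in isolation. In the sketch below (plain C++; Vec3, Quat and SimplePoseState are minimal stand-ins for OVR's Vector3d, Quatd and PoseState<double>, not the LibOVR types), the prediction interval shrinks at low speeds, where jitter is most visible, and both velocities are integrated over the clamped interval:

// Self-contained restatement of the speed-adaptive prediction above.
// Vec3, Quat and SimplePoseState are minimal stand-ins, kept just large
// enough to show the math; they are not the LibOVR types.
#include <cmath>

struct Vec3 { double x, y, z; };

static double length(const Vec3& v)
{
    return std::sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
}

struct Quat
{
    double w, x, y, z;

    // Rotation of 'angle' radians about 'axis' (axis need not be unit length).
    static Quat fromAxisAngle(const Vec3& axis, double angle)
    {
        double s = std::sin(angle * 0.5) / length(axis);
        return { std::cos(angle * 0.5), axis.x * s, axis.y * s, axis.z * s };
    }

    Quat operator*(const Quat& q) const
    {
        return { w * q.w - x * q.x - y * q.y - z * q.z,
                 w * q.x + x * q.w + y * q.z - z * q.y,
                 w * q.y - x * q.z + y * q.w + z * q.x,
                 w * q.z + x * q.y - y * q.x + z * q.w };
    }
};

struct SimplePoseState
{
    Quat rotation;
    Vec3 translation;
    Vec3 angularVelocity; // rad/s
    Vec3 linearVelocity;  // m/s
};

// Predict the pose predictionDt seconds ahead, shrinking the interval at low
// speeds (where jitter is most visible) exactly as calcPredictedPose does.
SimplePoseState predictPose(SimplePoseState s, double predictionDt)
{
    const double linearCoef = 1.0; // relative weight of linear vs. angular speed
    const double slope = 0.2;      // rate at which the interval grows with speed

    double angularSpeed = length(s.angularVelocity);
    double speed = angularSpeed + linearCoef * length(s.linearVelocity);

    // Use the speed-derived interval only when it is shorter, for stability.
    double dynamicDt = std::fmin(slope * speed, predictionDt);

    if (angularSpeed > 0.001) // avoid normalizing a near-zero rotation axis
    {
        s.rotation = s.rotation *
            Quat::fromAxisAngle(s.angularVelocity, angularSpeed * dynamicDt);
    }
    s.translation.x += s.linearVelocity.x * dynamicDt;
    s.translation.y += s.linearVelocity.y * dynamicDt;
    s.translation.z += s.linearVelocity.z * dynamicDt;
    return s;
}

With slope = 0.2, the full requested interval is only used once the combined speed exceeds predictionDt / slope (0.25 units/s for a 50 ms request); below that, the prediction horizon scales down proportionally with speed.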
diff --git a/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.h b/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.h
new file mode 100755
index 0000000..264a973
--- /dev/null
+++ b/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.h
@@ -0,0 +1,121 @@
+/************************************************************************************
+
+Filename : Vision_SensorStateReader.h
+Content : Separate reader component that is able to recover sensor pose
+Created : June 4, 2014
+Authors : Chris Taylor
+
+Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
+
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
+you may not use the Oculus VR Rift SDK except in compliance with the License,
+which is provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+You may obtain a copy of the License at
+
+http://www.oculusvr.com/licenses/LICENSE-3.2
+
+Unless required by applicable law or agreed to in writing, the Oculus VR SDK
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+*************************************************************************************/
+
+#ifndef OVR_Vision_SensorStateReader_h
+#define OVR_Vision_SensorStateReader_h
+
+#include "Kernel/OVR_Lockless.h"
+#include "Vision_SensorState.h"
+
+#include "OVR_Profile.h"
+
+
+// CAPI forward declarations.
+struct ovrTrackingState_;
+typedef struct ovrTrackingState_ ovrTrackingState;
+
+namespace OVR { namespace Vision {
+
+
+//-----------------------------------------------------------------------------
+// TrackingStateReader
+
+// Full output of tracking reported by GetTrackingStateAtTime()
+class TrackingState
+{
+public:
+ TrackingState() : StatusFlags(0) { }
+
+ // C-interop support
+ TrackingState(const ovrTrackingState& s);
+ operator ovrTrackingState () const;
+
+ // HMD pose information for the requested time.
+ PoseStatef HeadPose;
+
+ // Orientation and position of the external camera, if present.
+ Posef CameraPose;
+ // Orientation and position of the camera after alignment with gravity
+ Posef LeveledCameraPose;
+
+ // Most recent sensor data received from the HMD
+ SensorDataType RawSensorData;
+
+ // Sensor status described by ovrStatusBits.
+ uint32_t StatusFlags;
+
+};
+
+// User interface to retrieve pose from the sensor fusion subsystem
+class TrackingStateReader : public NewOverrideBase
+{
+protected:
+ const CombinedHmdUpdater* HmdUpdater;
+ const CameraStateUpdater* CameraUpdater;
+
+ // Transform from real-world coordinates to centered coordinates
+ Posed CenteredFromWorld;
+ static Posed DefaultWorldFromCentered;
+
+public:
+ TrackingStateReader();
+
+ // Initialize the updaters
+ void SetUpdaters(const CombinedHmdUpdater *hmd, const CameraStateUpdater *camera);
+
+
+ // Re-centers on the current yaw and translation, taking
+ // the head-neck model into account.
+ bool RecenterPose(const Vector3d& neckModelOffset);
+
+ // Computes CenteredFromWorld from a worldFromCpf pose and neck model offset
+ bool ComputeCenteredFromWorld(const Posed& worldFromCpf, const Vector3d& neckModelOffset);
+
+ // Get the full dynamical system state of the CPF, which includes velocities and accelerations,
+ // predicted at a specified absolute point in time.
+ bool GetTrackingStateAtTime(double absoluteTime, TrackingState& state) const;
+
+ // Get the predicted pose (orientation, position) of the center pupil frame (CPF) at a specific point in time.
+ bool GetPoseAtTime(double absoluteTime, Posef& transform) const;
+
+ // Get the sensor status (same as GetTrackingStateAtTime(...).StatusFlags)
+ uint32_t GetStatus() const;
+
+ Posed GetCenteredFromWorld() const
+ {
+ return CenteredFromWorld;
+ }
+
+ Posed GetDefaultCenteredFromWorld() const
+ {
+ return DefaultWorldFromCentered.Inverted();
+ }
+};
+
+
+}} // namespace OVR::Vision
+
+#endif // OVR_Vision_SensorStateReader_h
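
A closing note on ComputeCenteredFromWorld in the .cpp above: its subtle step is heading extraction, which flattens the look vector onto the horizontal plane and refuses to recenter when the headset points within 20 degrees of vertical (face down on a desk, for instance). The sketch below isolates just that step; extractHeading and the raw double[3] vectors are illustrative, not LibOVR API:

// Standalone sketch of the heading-extraction step in ComputeCenteredFromWorld.
// extractHeading and the raw double[3] vectors are illustrative, not LibOVR API.
// 'look' is the unit look vector in world space; index 1 is the world-up axis,
// so look[1] equals look.Dot(up) in the code above.
#include <cmath>

// Flatten 'look' to a horizontal heading. Returns false (and a straight-ahead
// default) when the headset points within 20 degrees of straight up or down,
// where yaw cannot be recovered reliably.
bool extractHeading(const double look[3], double headingOut[3])
{
    const double kPi = 3.14159265358979323846;
    const double lookTol = std::cos(20.0 * kPi / 180.0); // ~0.94

    if (std::fabs(look[1]) >= lookTol)
    {
        headingOut[0] = 0.0; headingOut[1] = 0.0; headingOut[2] = -1.0;
        return false; // caller should treat the recenter as unreliable
    }

    // Project onto the horizontal plane and renormalize. The length is
    // safely nonzero here because |look[1]| < cos(20 deg) for a unit vector.
    double len = std::sqrt(look[0] * look[0] + look[2] * look[2]);
    headingOut[0] = look[0] / len;
    headingOut[1] = 0.0;
    headingOut[2] = look[2] / len;
    return true;
}

RecenterPose then combines this heading with the neck-model offset to build the centered pose, and stores its inverse as CenteredFromWorld, which GetTrackingStateAtTime applies to every pose it reports.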