author    Sven Gothel <[email protected]> 2015-03-28 01:43:35 +0100
committer Sven Gothel <[email protected]> 2015-03-28 01:43:35 +0100
commit    4207f9c279e832e3afcb3f5fc6cd8d84cb4cfe4c (patch)
tree      cf3671058d55b47ab6cb6f36f369928606137628 /LibOVR/Src/Vision
parent    c29cd1a2fbff6282bab956ad61661ac9d48c4e6e (diff)
Bump OculusVR RIFT SDK to 0.5.0.1 (tag: vanilla_0.5.0.1)
Diffstat (limited to 'LibOVR/Src/Vision')
-rwxr-xr-x  LibOVR/Src/Vision/SensorFusion/Vision_SensorState.h         | 161
-rwxr-xr-x  LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.cpp | 253
-rwxr-xr-x  LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.h   | 121
-rw-r--r--  LibOVR/Src/Vision/Vision_Common.h                           | 299
4 files changed, 834 insertions, 0 deletions
diff --git a/LibOVR/Src/Vision/SensorFusion/Vision_SensorState.h b/LibOVR/Src/Vision/SensorFusion/Vision_SensorState.h
new file mode 100755
index 0000000..13b0bb3
--- /dev/null
+++ b/LibOVR/Src/Vision/SensorFusion/Vision_SensorState.h
@@ -0,0 +1,161 @@
+/************************************************************************************
+
+Filename : Vision_SensorState.h
+Content : Sensor state information shared by tracking system with games
+Created : May 13, 2014
+Authors : Dov Katz, Chris Taylor
+
+Copyright : Copyright 2014 Oculus VR, Inc. All Rights reserved.
+
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
+you may not use the Oculus VR Rift SDK except in compliance with the License,
+which is provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+You may obtain a copy of the License at
+
+http://www.oculusvr.com/licenses/LICENSE-3.2
+
+Unless required by applicable law or agreed to in writing, the Oculus VR SDK
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+*************************************************************************************/
+
+#ifndef OVR_Vision_SensorState_h
+#define OVR_Vision_SensorState_h
+
+#include "Vision/Vision_Common.h"
+#include "Kernel/OVR_SharedMemory.h"
+#include "Kernel/OVR_Lockless.h"
+#include "Kernel/OVR_String.h"
+#include "Util/Util_LatencyTest2State.h"
+#include "Sensors/OVR_DeviceConstants.h"
+
+
+namespace OVR { namespace Vision {
+
+
+// Bit flags describing the current status of sensor tracking.
+// These values must be the same as ovrStatusBits
+enum StatusBits
+{
+ // Tracked bits: Toggled by SensorFusion
+ Status_OrientationTracked = 0x0001, // Orientation is currently tracked (connected and in use)
+ Status_PositionTracked = 0x0002, // Position is currently tracked (false if out of range)
+ Status_CameraPoseTracked = 0x0004, // Camera pose is currently tracked
+
+ // Connected bits: Toggled by TrackingManager
+ Status_PositionConnected = 0x0020, // Position tracking HW is connected
+ Status_BuiltinConnected = 0x0040, // Builtin tracking HW is connected
+ Status_HMDConnected = 0x0080, // HMD is available & connected
+
+ // Masks
+ Status_AllMask = 0xffff,
+ Status_TrackingMask = Status_PositionTracked | Status_OrientationTracked | Status_CameraPoseTracked,
+ Status_ConnectedMask = Status_PositionConnected | Status_HMDConnected,
+};
+
+#pragma pack(push, 8)
+
+// TrackedObject state stored in lockless updater "queue" and used for
+// prediction by SensorStateReader
+struct LocklessSensorState
+{
+ PoseState<double> WorldFromImu;
+ SensorDataType RawSensorData;
+
+ // DO NOT USE
+ // only preserved for backwards compatibility
+ Pose<double> WorldFromCamera_DEPRECATED;
+
+ uint32_t StatusFlags;
+ uint32_t _PAD_0_;
+
+ // ImuFromCpf for HMD pose tracking
+ Posed ImuFromCpf;
+
+ // Initialized to invalid state
+ LocklessSensorState() :
+ WorldFromImu()
+ , RawSensorData()
+ , WorldFromCamera_DEPRECATED()
+ , StatusFlags(0)
+ , _PAD_0_(0) // This assignment should be irrelevant, but it quells static/runtime analysis complaints.
+ , ImuFromCpf()
+ {
+ }
+};
+
+static_assert((sizeof(LocklessSensorState) == sizeof(PoseState<double>) + sizeof(SensorDataType) + sizeof(Pose<double>) + 2*sizeof(uint32_t) + sizeof(Posed)), "sizeof(LocklessSensorState) failure");
+
+struct LocklessCameraState
+{
+ Pose<double> WorldFromCamera;
+
+ uint32_t StatusFlags;
+ uint32_t _PAD_0_;
+
+ // Initialized to invalid state
+ LocklessCameraState() :
+ WorldFromCamera()
+ , StatusFlags(0)
+ , _PAD_0_(0) // This assignment should be irrelevant, but it quells static/runtime analysis complaints.
+ {
+ }
+};
+
+static_assert((sizeof(LocklessCameraState) == sizeof(Pose<double>) + 2 * sizeof(uint32_t)), "sizeof(LocklessCameraState) failure");
+
+
+// Padded out version stored in the updater slots
+// Designed to be a larger fixed size to allow the data to grow in the future
+// without breaking older compiled code.
+OVR_DISABLE_MSVC_WARNING(4351)
+template <class Payload, int PaddingSize>
+struct LocklessPadding
+{
+ uint8_t buffer[PaddingSize];
+
+ LocklessPadding() : buffer() { }
+
+ LocklessPadding& operator=(const Payload& rhs)
+ {
+ // if this fires off, then increase PaddingSize
+ // IMPORTANT: this WILL break backwards compatibility
+ static_assert(sizeof(buffer) >= sizeof(Payload), "PaddingSize is too small");
+
+ memcpy(buffer, &rhs, sizeof(Payload));
+ return *this;
+ }
+
+ operator Payload() const
+ {
+ Payload result;
+ memcpy(&result, buffer, sizeof(Payload));
+ return result;
+ }
+};
+OVR_RESTORE_MSVC_WARNING()
+
+#pragma pack(pop)
+
+//// Lockless updaters
+
+struct CombinedHmdUpdater
+{
+ // IMPORTANT: do not add more data to this struct
+ // new objects should have their own shared memory blocks
+ LocklessUpdater<LocklessSensorState, LocklessPadding<LocklessSensorState, 512> > SensorState;
+ LocklessUpdater<Util::FrameTimeRecordSet, Util::FrameTimeRecordSet> LatencyTest;
+};
+
+
+typedef LocklessUpdater<LocklessCameraState, LocklessPadding<LocklessCameraState, 512> > CameraStateUpdater;
+
+
+}} // namespace OVR::Vision
+
+#endif // OVR_Vision_SensorState_h
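
The LocklessPadding template above is the forward-compatibility mechanism for the
shared-memory slots: each payload is stored in a fixed 512-byte buffer so the
payload structs can grow in later SDK versions without changing the layout that
older compiled binaries expect. Below is a minimal standalone sketch of that
round-trip pattern; all names in it are illustrative stand-ins, not part of LibOVR:

    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    struct Payload                        // stand-in for LocklessSensorState
    {
        double   timeInSeconds;
        uint32_t statusFlags;
    };

    template <class T, int PaddingSize>
    struct PaddedSlot                     // mirrors LocklessPadding above
    {
        uint8_t buffer[PaddingSize];

        PaddedSlot() : buffer() { }

        PaddedSlot& operator=(const T& rhs)
        {
            // If this fires, PaddingSize must grow, which breaks compatibility.
            static_assert(sizeof(buffer) >= sizeof(T), "PaddingSize is too small");
            memcpy(buffer, &rhs, sizeof(T));
            return *this;
        }

        operator T() const                // read the payload back out
        {
            T result;
            memcpy(&result, buffer, sizeof(T));
            return result;
        }
    };

    int main()
    {
        PaddedSlot<Payload, 512> slot;    // same slot size as the updaters above
        Payload in = { 12.5, 0x0001 };
        slot = in;                        // writer side copies into the padded slot
        Payload out = slot;               // reader side recovers sizeof(Payload) bytes
        printf("t=%.1f flags=0x%x\n", out.timeInSeconds, (unsigned)out.statusFlags);
        return 0;
    }
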
diff --git a/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.cpp b/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.cpp
new file mode 100755
index 0000000..b60b500
--- /dev/null
+++ b/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.cpp
@@ -0,0 +1,253 @@
+/************************************************************************************
+
+Filename : Vision_SensorStateReader.cpp
+Content : Separate reader component that is able to recover sensor pose
+Created : June 4, 2014
+Authors : Chris Taylor
+
+Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
+
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
+you may not use the Oculus VR Rift SDK except in compliance with the License,
+which is provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+You may obtain a copy of the License at
+
+http://www.oculusvr.com/licenses/LICENSE-3.2
+
+Unless required by applicable law or agreed to in writing, the Oculus VR SDK
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+*************************************************************************************/
+
+#include "Vision_SensorStateReader.h"
+
+
+namespace OVR { namespace Vision {
+
+
+//-------------------------------------------------------------------------------------
+
+// This is a "perceptually tuned predictive filter", which means that it is optimized
+// for improvements in the VR experience, rather than pure error. In particular,
+// jitter is more perceptible at lower speeds whereas latency is more perceptible
+// after a high-speed motion. Therefore, the prediction interval is dynamically
+// adjusted based on speed. Significantly more research is needed to further improve
+// this family of filters.
+static Pose<double> calcPredictedPose(const PoseState<double>& poseState, double predictionDt)
+{
+ Pose<double> pose = poseState.ThePose;
+ const double linearCoef = 1.0;
+ Vector3d angularVelocity = poseState.AngularVelocity;
+ double angularSpeed = angularVelocity.Length();
+
+ // This could be tuned so that linear and angular are combined with different coefficients
+ double speed = angularSpeed + linearCoef * poseState.LinearVelocity.Length();
+
+ const double slope = 0.2; // The rate at which the dynamic prediction interval varies
+ double candidateDt = slope * speed; // TODO: Replace with smoothstep function
+
+ double dynamicDt = predictionDt;
+
+ // Choose the candidate if it is shorter, to improve stability
+ if (candidateDt < predictionDt)
+ {
+ dynamicDt = candidateDt;
+ }
+
+ if (angularSpeed > 0.001)
+ {
+ pose.Rotation = pose.Rotation * Quatd(angularVelocity, angularSpeed * dynamicDt);
+ }
+
+ pose.Translation += poseState.LinearVelocity * dynamicDt;
+
+ return pose;
+}
+
+PoseState<float> calcPredictedPoseState(const LocklessSensorState& sensorState, double absoluteTime, const Posed& centeredFromWorld)
+{
+ // Delta time from the last available data
+ double pdt = absoluteTime - sensorState.WorldFromImu.TimeInSeconds;
+ static const double maxPdt = 0.1;
+
+ // If delta went negative due to synchronization problems between processes or just a lag spike,
+ if (pdt < 0)
+ {
+ pdt = 0;
+ }
+ else if (pdt > maxPdt)
+ {
+ pdt = maxPdt;
+ static double lastLatWarnTime = 0;
+ if (lastLatWarnTime != sensorState.WorldFromImu.TimeInSeconds)
+ {
+ lastLatWarnTime = sensorState.WorldFromImu.TimeInSeconds;
+ LogText("[TrackingStateReader] Prediction interval too high: %f s, clamping at %f s\n", pdt, maxPdt);
+ }
+ }
+
+ PoseStatef result;
+ result = PoseStatef(sensorState.WorldFromImu);
+ result.TimeInSeconds = absoluteTime;
+ result.ThePose = Posef(centeredFromWorld * calcPredictedPose(sensorState.WorldFromImu, pdt) * sensorState.ImuFromCpf);
+ return result;
+}
+
+//// TrackingStateReader
+
+// Pre-0.5.0 applications assume that the initial WorldFromCentered
+// pose is always identity, because the WorldFromImu pose has a 180-degree flip in Y
+// and a 1-meter offset in Z. See CAPI_HMDState.cpp
+Posed TrackingStateReader::DefaultWorldFromCentered(Quatd::Identity(), Vector3d(0, 0, 0));
+
+// At startup, we want an identity pose when the user is looking along the positive camera Z axis, one meter in front of the camera.
+// That is a 180 degree rotation about Y, with a -1 meter translation (the inverse of this pose, CenteredFromWorld, is actually used)
+// (NOTE: This pose should be the same as SensorFusionFilter::DefaultWorldFromImu)
+//
+//Posed TrackingStateReader::DefaultWorldFromCentered(Quatd(0, 1, 0, 0), Vector3d(0, 0, -1));
+
+TrackingStateReader::TrackingStateReader() :
+ HmdUpdater(nullptr),
+ CameraUpdater(nullptr)
+{
+ CenteredFromWorld = GetDefaultCenteredFromWorld();
+}
+
+void TrackingStateReader::SetUpdaters(const CombinedHmdUpdater *hmd, const CameraStateUpdater *camera)
+{
+ HmdUpdater = hmd;
+ CameraUpdater = camera;
+}
+
+
+// This function centers tracking on the current pose, such that when the
+// headset is positioned at the current pose and looking level in the current direction,
+// the tracking system pose will be identity.
+// In other words, tracking is relative to this centered pose.
+//
+bool TrackingStateReader::RecenterPose(const Vector3d& neckModelOffset)
+{
+ if (!HmdUpdater)
+ return false;
+
+ const LocklessSensorState lstate = HmdUpdater->SensorState.GetState();
+ Posed worldFromCpf = (lstate.WorldFromImu.ThePose * lstate.ImuFromCpf);
+
+ return ComputeCenteredFromWorld(worldFromCpf, neckModelOffset);
+}
+
+bool TrackingStateReader::ComputeCenteredFromWorld(const Posed& worldFromCpf, const Vector3d& neckModel)
+{
+ // Position of CPF in the head rotation center frame
+ const Vector3d cpfInRotationCenter = neckModel;
+
+ const Vector3d forward(0, 0, -1);
+ const Vector3d up(0, 1, 0);
+ Vector3d look = worldFromCpf.Rotate(forward);
+
+ // If the headset is pointed straight up or straight down,
+ // it may be face down on a tabletop. In this case we
+ // can't reliably extract a heading angle.
+ // We assume straight ahead and return false so caller
+ // knows that recenter may not be reliable.
+ bool headingValid = true;
+ static const double lookTol = cos(DegreeToRad(20.0));
+ if (fabs(look.Dot(up)) >= lookTol) // fabs(lookBack.Dot(up))
+ {
+ look = forward;
+ headingValid = false;
+ }
+
+ // Now compute the orientation of the headset when looking straight ahead:
+ // Extract the heading (yaw) component of the pose
+ Vector3d centeredLook = Vector3d(look.x, 0, look.z).Normalized();
+ Quatd centeredOrientation = Quatd::Align(centeredLook, forward);
+
+ // Compute the position in world space of the head rotation center:
+ // we assume the head rotates about this point in space.
+ Vector3d headRotationCenter = worldFromCpf.Transform(-cpfInRotationCenter);
+
+ // Now apply the heading rotation to compute the reference position of the CPF
+ // relative to the head rotation center.
+ Vector3d centeredCpfPos = headRotationCenter + centeredOrientation.Rotate(cpfInRotationCenter);
+
+ // Now compute the centered pose of the CPF.
+ Posed worldFromCentered(centeredOrientation, centeredCpfPos);
+
+ // For tracking, we use the inverse of the centered pose
+ CenteredFromWorld = worldFromCentered.Inverted();
+
+ return headingValid;
+}
+
+bool TrackingStateReader::GetTrackingStateAtTime(double absoluteTime, TrackingState& ss) const
+{
+ LocklessCameraState cameraState;
+ LocklessSensorState sensorState;
+
+ if (CameraUpdater)
+ cameraState = CameraUpdater->GetState();
+ if (HmdUpdater)
+ sensorState = HmdUpdater->SensorState.GetState();
+
+ // Update the status flags
+ ss.StatusFlags = cameraState.StatusFlags | sensorState.StatusFlags;
+
+ // If no hardware is connected, override the tracking flags
+ if (0 == (ss.StatusFlags & Status_HMDConnected))
+ {
+ ss.StatusFlags &= ~Status_TrackingMask;
+ }
+ if (0 == (ss.StatusFlags & Status_PositionConnected))
+ {
+ ss.StatusFlags &= ~(Status_PositionTracked | Status_CameraPoseTracked);
+ }
+
+ // If tracking info is invalid,
+ if (0 == (ss.StatusFlags & Status_TrackingMask))
+ {
+ return false;
+ }
+
+ ss.HeadPose = calcPredictedPoseState(sensorState, absoluteTime, CenteredFromWorld);
+
+ ss.CameraPose = Posef(CenteredFromWorld * cameraState.WorldFromCamera);
+ ss.LeveledCameraPose = Posef(CenteredFromWorld * Posed(Quatd(), cameraState.WorldFromCamera.Translation));
+
+ ss.RawSensorData = sensorState.RawSensorData;
+ return true;
+}
+
+bool TrackingStateReader::GetPoseAtTime(double absoluteTime, Posef& transform) const
+{
+ TrackingState ss;
+
+ if (!GetTrackingStateAtTime(absoluteTime, ss))
+ {
+ return false;
+ }
+
+ transform = ss.HeadPose.ThePose;
+
+ return true;
+}
+
+uint32_t TrackingStateReader::GetStatus() const
+{
+ TrackingState ss;
+
+ if (!GetTrackingStateAtTime(0, ss))
+ {
+ return 0;
+ }
+
+ return ss.StatusFlags;
+}
+
+
+}} // namespace OVR::Vision
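
The "perceptually tuned" part of calcPredictedPose above is the dynamic
prediction interval: jitter dominates perception at low speeds, so the requested
interval is shortened when the headset is nearly still, and only fast motion gets
the full look-ahead. The clamp is easy to verify in isolation; this standalone
sketch reuses the constants from the function above, with plain doubles standing
in for the OVR vector types:

    #include <cstdio>

    // Returns the prediction interval actually used, in seconds.
    static double dynamicPredictionDt(double angularSpeed, // rad/s, |AngularVelocity|
                                      double linearSpeed,  // m/s,   |LinearVelocity|
                                      double requestedDt)  // s, caller's predictionDt
    {
        const double linearCoef = 1.0;  // relative weight of linear speed
        const double slope      = 0.2;  // growth rate of the dynamic interval

        double speed       = angularSpeed + linearCoef * linearSpeed;
        double candidateDt = slope * speed;

        // Choose the shorter interval to keep the pose stable at low speed.
        return (candidateDt < requestedDt) ? candidateDt : requestedDt;
    }

    int main()
    {
        // Nearly still: the interval shrinks to 0.01 s, suppressing jitter.
        printf("slow: %f s\n", dynamicPredictionDt(0.05, 0.0, 0.04));
        // Fast head turn: the full requested 0.04 s is used, hiding latency.
        printf("fast: %f s\n", dynamicPredictionDt(5.0, 0.5, 0.04));
        return 0;
    }
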
diff --git a/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.h b/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.h
new file mode 100755
index 0000000..264a973
--- /dev/null
+++ b/LibOVR/Src/Vision/SensorFusion/Vision_SensorStateReader.h
@@ -0,0 +1,121 @@
+/************************************************************************************
+
+Filename : Vision_SensorStateReader.h
+Content : Separate reader component that is able to recover sensor pose
+Created : June 4, 2014
+Authors : Chris Taylor
+
+Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
+
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
+you may not use the Oculus VR Rift SDK except in compliance with the License,
+which is provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+You may obtain a copy of the License at
+
+http://www.oculusvr.com/licenses/LICENSE-3.2
+
+Unless required by applicable law or agreed to in writing, the Oculus VR SDK
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+*************************************************************************************/
+
+#ifndef OVR_Vision_SensorStateReader_h
+#define OVR_Vision_SensorStateReader_h
+
+#include "Kernel/OVR_Lockless.h"
+#include "Vision_SensorState.h"
+
+#include "OVR_Profile.h"
+
+
+// CAPI forward declarations.
+struct ovrTrackingState_;
+typedef struct ovrTrackingState_ ovrTrackingState;
+
+namespace OVR { namespace Vision {
+
+
+//-----------------------------------------------------------------------------
+// TrackingStateReader
+
+// Full output of tracking reported by GetTrackingStateAtTime()
+class TrackingState
+{
+public:
+ TrackingState() : StatusFlags(0) { }
+
+ // C-interop support
+ TrackingState(const ovrTrackingState& s);
+ operator ovrTrackingState () const;
+
+ // HMD pose information for the requested time.
+ PoseStatef HeadPose;
+
+ // Orientation and position of the external camera, if present.
+ Posef CameraPose;
+ // Orientation and position of the camera after alignment with gravity
+ Posef LeveledCameraPose;
+
+ // Most recent sensor data received from the HMD
+ SensorDataType RawSensorData;
+
+ // Sensor status described by ovrStatusBits.
+ uint32_t StatusFlags;
+
+};
+
+// User interface to retrieve pose from the sensor fusion subsystem
+class TrackingStateReader : public NewOverrideBase
+{
+protected:
+ const CombinedHmdUpdater* HmdUpdater;
+ const CameraStateUpdater* CameraUpdater;
+
+ // Transform from real-world coordinates to centered coordinates
+ Posed CenteredFromWorld;
+ static Posed DefaultWorldFromCentered;
+
+public:
+ TrackingStateReader();
+
+ // Initialize the updaters
+ void SetUpdaters(const CombinedHmdUpdater *hmd, const CameraStateUpdater *camera);
+
+
+ // Re-centers on the current yaw and translation, taking
+ // the head-neck model into account.
+ bool RecenterPose(const Vector3d& neckModelOffset);
+
+ // Computes CenteredFromWorld from a worldFromCpf pose and neck model offset
+ bool ComputeCenteredFromWorld(const Posed& worldFromCpf, const Vector3d& neckModelOffset);
+
+ // Get the full dynamical system state of the CPF, which includes velocities and accelerations,
+ // predicted at a specified absolute point in time.
+ bool GetTrackingStateAtTime(double absoluteTime, TrackingState& state) const;
+
+ // Get the predicted pose (orientation, position) of the center pupil frame (CPF) at a specific point in time.
+ bool GetPoseAtTime(double absoluteTime, Posef& transform) const;
+
+ // Get the sensor status (same as GetTrackingStateAtTime(...).StatusFlags)
+ uint32_t GetStatus() const;
+
+ Posed GetCenteredFromWorld() const
+ {
+ return CenteredFromWorld;
+ }
+
+ Posed GetDefaultCenteredFromWorld() const
+ {
+ return DefaultWorldFromCentered.Inverted();
+ }
+};
+
+
+}} // namespace OVR::Vision
+
+#endif // OVR_Vision_SensorStateReader_h
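
A hypothetical caller of this header, to show how the pieces fit together:
SetUpdaters() attaches the reader to the shared-memory blocks, RecenterPose()
re-bases CenteredFromWorld, and GetPoseAtTime() returns the predicted pose.
How the updater pointers are obtained is outside this header, so they appear
here as parameters; this is a sketch, not SDK sample code:

    #include "Vision/SensorFusion/Vision_SensorStateReader.h"

    using namespace OVR;
    using namespace OVR::Vision;

    void pollHeadPose(const CombinedHmdUpdater* hmd,    // from shared memory
                      const CameraStateUpdater* camera, // from shared memory
                      double predictedDisplayTime)
    {
        TrackingStateReader reader;
        reader.SetUpdaters(hmd, camera);

        // Re-center on the current pose; a zero vector applies no
        // head-neck model offset.
        reader.RecenterPose(Vector3d(0, 0, 0));

        Posef headPose;
        if (reader.GetPoseAtTime(predictedDisplayTime, headPose))
        {
            // headPose is the predicted center pupil frame (CPF) pose in
            // centered coordinates, ready for view-matrix construction.
        }
        else
        {
            // No orientation or position tracking is currently available.
        }
    }
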
diff --git a/LibOVR/Src/Vision/Vision_Common.h b/LibOVR/Src/Vision/Vision_Common.h
new file mode 100644
index 0000000..0610adb
--- /dev/null
+++ b/LibOVR/Src/Vision/Vision_Common.h
@@ -0,0 +1,299 @@
+/************************************************************************************
+
+Filename : Vision_Common.h
+Content : Common data structures that are used in multiple vision files
+Created : November 25, 2014
+Authors : Max Katsev
+
+Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
+
+Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
+you may not use the Oculus VR Rift SDK except in compliance with the License,
+which is provided at the time of installation or download, or which
+otherwise accompanies this software in either electronic or hard copy form.
+
+You may obtain a copy of the License at
+
+http://www.oculusvr.com/licenses/LICENSE-3.2
+
+Unless required by applicable law or agreed to in writing, the Oculus VR SDK
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+*************************************************************************************/
+
+#ifndef OVR_Vision_Common_h
+#define OVR_Vision_Common_h
+
+#include "Kernel/OVR_RefCount.h"
+#include "Extras/OVR_Math.h"
+#include "Kernel/OVR_Array.h"
+#include "Kernel/OVR_Log.h"
+#include "Sensors/OVR_DeviceConstants.h"
+
+// Compatible types (these are declared in global namespace)
+typedef struct ovrPoseStatef_ ovrPoseStatef;
+typedef struct ovrPoseStated_ ovrPoseStated;
+
+namespace OVR { namespace Vision {
+
+// Global "calibration mode" used by calibration tools to change
+// the behavior of the SDK for calibration/experimentation purposes.
+// This flag is set at system startup by calibration tools, and never changed.
+
+extern int BundleCalibrationMode;
+
+// Vision <-> OVR transform functions
+//
+// These transforms are required across the interface to many of the
+// matching and reconstruction functions.
+//
+// OVR system is x+ right, y+ up, z+ back.
+// Vision system is x+ right, y+ down, z+ forward.
+// This is a 180 degree rotation about X axis.
+//
+template<typename T> inline Vector3<T> VisionFromOvr(const Vector3<T>& ovr) { return Vector3<T>(ovr.x, -ovr.y, -ovr.z); }
+template<typename T> inline Vector3<T> OvrFromVision(const Vector3<T>& vision) { return Vector3<T>(vision.x, -vision.y, -vision.z); }
+
+template<typename T> inline Quat<T> VisionFromOvr(const Quat<T>& ovr) { return Quat<T>(ovr.x, -ovr.y, -ovr.z, ovr.w); }
+template<typename T> inline Quat<T> OvrFromVision(const Quat<T>& vision) { return Quat<T>(vision.x, -vision.y, -vision.z, vision.w); }
+
+template<typename T> inline Pose<T> VisionFromOvr(const Pose<T>& ovr) { return Pose<T>(VisionFromOvr(ovr.Rotation), VisionFromOvr(ovr.Translation)); }
+template<typename T> inline Pose<T> OvrFromVision(const Pose<T>& vision) { return Pose<T>(OvrFromVision(vision.Rotation), OvrFromVision(vision.Translation)); }
+
+struct ImuSample
+{
+ double Time;
+
+ Vector3d Accelerometer;
+ Vector3d Gyro;
+ Vector3d Magnetometer;
+ double Temperature;
+
+ ImuSample() : Time(-1),
+ Temperature(-1) {}
+
+ ImuSample(const SensorDataType& data) : Time(data.AbsoluteTimeSeconds),
+ Accelerometer(data.Acceleration),
+ Gyro(data.RotationRate),
+ Magnetometer(data.MagneticField),
+ Temperature(data.Temperature) {}
+};
+
+struct PoseSample
+{
+ double Time;
+ Posed ThePose;
+ Vector3d LinearVelocity, AngularVelocity;
+
+ // stats for LED tracking
+ int LedsCount;
+ double ObjectSpaceError;
+ // stats for sphere tracking
+ int ContourCount;
+ double CircleRadius;
+
+ bool HasOrientation, HasPosition, HasVelocities;
+ // true => ThePose == WorldFromImu, false => ThePose == CameraFromImu
+ bool IsInWorldFrame;
+
+ void ApplyWorldFromCamera(const Posed& worldFromCamera)
+ {
+ OVR_ASSERT(!IsInWorldFrame);
+
+ IsInWorldFrame = true;
+ ThePose = worldFromCamera * ThePose;
+ if (HasVelocities)
+ {
+ LinearVelocity = worldFromCamera.Rotate(LinearVelocity);
+ AngularVelocity = worldFromCamera.Rotate(AngularVelocity);
+ }
+ }
+
+ friend PoseSample operator*(const PoseSample& sample, const Posed& trans)
+ {
+ PoseSample result = sample;
+ result.ThePose = sample.ThePose * trans;
+ // if we don't have orientation, the result will be useless - this is probably not expected to happen
+ OVR_ASSERT(sample.HasOrientation);
+ result.HasPosition = sample.HasPosition && sample.HasOrientation;
+ return result;
+ }
+
+ PoseSample(double time = -1) : Time(time),
+ LedsCount(-1),
+ ObjectSpaceError(-1),
+ ContourCount(-1),
+ CircleRadius(-1),
+ HasOrientation(false),
+ HasPosition(false),
+ HasVelocities(false),
+ IsInWorldFrame(false) {}
+};
+
+struct PoseEstimate
+{
+ Posed WorldFromImu, CameraFromWorld;
+
+ bool HasPosition, HasOrientation, HasUp;
+
+ Posed CameraFromImu() const
+ {
+ return CameraFromWorld * WorldFromImu;
+ }
+
+ friend PoseEstimate operator*(const PoseEstimate& estimate, const Posed& trans)
+ {
+ PoseEstimate result = estimate;
+ result.WorldFromImu = estimate.WorldFromImu * trans;
+ // if we don't have orientation, the result will be useless - this is probably not expected to happen
+ OVR_ASSERT(estimate.HasOrientation);
+ result.HasPosition = estimate.HasPosition && estimate.HasOrientation;
+ return result;
+ }
+
+ PoseEstimate(const Posed& worldFromCamera) :
+ CameraFromWorld(worldFromCamera.Inverted()),
+ HasPosition(false),
+ HasOrientation(false),
+ HasUp(false) {}
+};
+
+} // namespace Vision
+
+// PoseState describes the complete pose, or a rigid body configuration, at a
+// point in time, including first and second derivatives. It is used to specify
+// instantaneous location and movement of the headset.
+// PoseState is returned as part of the sensor state.
+
+template<class T>
+class PoseState
+{
+public:
+ typedef typename CompatibleTypes<Pose<T> >::Type CompatibleType;
+
+ PoseState() : TimeInSeconds(0.0) { }
+
+ // float <-> double conversion constructor.
+ explicit PoseState(const PoseState<typename Math<T>::OtherFloatType> &src)
+ : ThePose(src.ThePose),
+ AngularVelocity(src.AngularVelocity), LinearVelocity(src.LinearVelocity),
+ AngularAcceleration(src.AngularAcceleration), LinearAcceleration(src.LinearAcceleration),
+ TimeInSeconds(src.TimeInSeconds)
+ { }
+
+ // C-interop support: PoseStatef <-> ovrPoseStatef
+ PoseState(const typename CompatibleTypes<PoseState<T> >::Type& src)
+ : ThePose(src.ThePose),
+ AngularVelocity(src.AngularVelocity), LinearVelocity(src.LinearVelocity),
+ AngularAcceleration(src.AngularAcceleration), LinearAcceleration(src.LinearAcceleration),
+ TimeInSeconds(src.TimeInSeconds)
+ { }
+
+ operator typename CompatibleTypes<PoseState<T> >::Type() const
+ {
+ typename CompatibleTypes<PoseState<T> >::Type result;
+ result.ThePose = ThePose;
+ result.AngularVelocity = AngularVelocity;
+ result.LinearVelocity = LinearVelocity;
+ result.AngularAcceleration = AngularAcceleration;
+ result.LinearAcceleration = LinearAcceleration;
+ result.TimeInSeconds = TimeInSeconds;
+ return result;
+ }
+
+ Pose<T> ThePose;
+ Vector3<T> AngularVelocity;
+ Vector3<T> LinearVelocity;
+ Vector3<T> AngularAcceleration;
+ Vector3<T> LinearAcceleration;
+ // Absolute time of this state sample; always a double measured in seconds.
+ double TimeInSeconds;
+
+ // ***** Helpers for Pose integration
+
+ // Stores and integrates gyro angular velocity reading for a given time step.
+ void StoreAndIntegrateGyro(Vector3d angVel, double dt)
+ {
+ AngularVelocity = angVel;
+ ThePose.Rotation *= Quatd::FromRotationVector(angVel * dt);
+ }
+
+ void StoreAndIntegrateAccelerometer(Vector3d linearAccel, double dt)
+ {
+ LinearAcceleration = linearAccel;
+ ThePose.Translation += LinearVelocity * dt + LinearAcceleration * (dt * dt * 0.5);
+ LinearVelocity += LinearAcceleration * dt;
+ }
+
+ friend PoseState operator*(const Pose<T>& trans, const PoseState& poseState)
+ {
+ PoseState result;
+ result.ThePose = trans * poseState.ThePose;
+ result.LinearVelocity = trans.Rotate(poseState.LinearVelocity);
+ result.LinearAcceleration = trans.Rotate(poseState.LinearAcceleration);
+ result.AngularVelocity = trans.Rotate(poseState.AngularVelocity);
+ result.AngularAcceleration = trans.Rotate(poseState.AngularAcceleration);
+ return result;
+ }
+};
+
+// External API returns pose as float, but uses doubles internally for quaternion precision.
+typedef PoseState<float> PoseStatef;
+typedef PoseState<double> PoseStated;
+
+// Compatible types
+template<> struct CompatibleTypes<PoseState<float> > { typedef ovrPoseStatef Type; };
+template<> struct CompatibleTypes<PoseState<double> > { typedef ovrPoseStated Type; };
+
+// Handy debug output functions
+template<typename T>
+void Dump(const char* label, const Pose<T>& pose)
+{
+ auto t = pose.Translation * 1000;
+ auto r = pose.Rotation.ToRotationVector();
+ double angle = RadToDegree(r.Length());
+ if (r.LengthSq() > 0)
+ r.Normalize();
+ LogText("%s: %.2f, %.2f, %.2f mm, %.2f deg %.2f, %.2f, %.2f\n",
+ label, t.x, t.y, t.z, angle, r.x, r.y, r.z);
+}
+
+template<typename T>
+void Dump(const char* label, const Vector3<T>& v)
+{
+ LogText("%s %.5g, %.5g, %.5g (%.5g)\n", label, v.x, v.y, v.z, v.Length());
+}
+
+template<typename T>
+void Dump(const char* label, const Quat<T>& q)
+{
+ auto r = q.ToRotationVector();
+ auto axis = r.Normalized();
+ auto angle = RadToDegree(r.Length());
+ LogText("%s %.2f (%.2f, %.2f, %.2f)\n", label, angle, axis.x, axis.y, axis.z);
+}
+
+template<typename T>
+void Dump(const char* label, double time, const Pose<T>& p)
+{
+ LogText("%.4f: ", time);
+ Dump(label, p);
+}
+
+static_assert((sizeof(PoseState<double>) == sizeof(Pose<double>) + 4 * sizeof(Vector3<double>) + sizeof(double)), "sizeof(PoseState<double>) failure");
+#ifdef OVR_CPU_X86_64
+static_assert((sizeof(PoseState<float>) == sizeof(Pose<float>) + 4 * sizeof(Vector3<float>) + sizeof(uint32_t)+sizeof(double)), "sizeof(PoseState<float>) failure"); //TODO: Manually pad template.
+#elif defined(OVR_OS_WIN32) // The Windows 32 bit ABI aligns 64 bit values on 64 bit boundaries
+static_assert((sizeof(PoseState<float>) == sizeof(Pose<float>) + 4 * sizeof(Vector3<float>) + sizeof(uint32_t)+sizeof(double)), "sizeof(PoseState<float>) failure");
+#elif defined(OVR_CPU_ARM) // ARM aligns 64 bit values to 64 bit boundaries: http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0472k/chr1359125009502.html
+static_assert((sizeof(PoseState<float>) == sizeof(Pose<float>) + 4 * sizeof(Vector3<float>) + sizeof(uint32_t)+sizeof(double)), "sizeof(PoseState<float>) failure");
+#else // Else Unix/Apple 32 bit ABI, which aligns 64 bit values on 32 bit boundaries.
+static_assert((sizeof(PoseState<float>) == sizeof(Pose<float>) + 4 * sizeof(Vector3<float>) + sizeof(double)), "sizeof(PoseState<float>) failure:");
+#endif
+
+} // namespace OVR
+
+#endif // OVR_Vision_Common_h
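
The axis conventions documented in Vision_Common.h (OVR: x+ right, y+ up,
z+ back; Vision: x+ right, y+ down, z+ forward) differ by a 180 degree rotation
about X, which is why VisionFromOvr and OvrFromVision use the same component
mapping (x, y, z) -> (x, -y, -z) and applying either one twice is the identity.
A standalone numeric check, with a minimal local vector type standing in for
OVR::Vector3:

    #include <cstdio>

    struct Vec3 { double x, y, z; };

    // Same component mapping as the VisionFromOvr/OvrFromVision templates.
    static Vec3 visionFromOvr(const Vec3& v) { return Vec3{ v.x, -v.y, -v.z }; }
    static Vec3 ovrFromVision(const Vec3& v) { return Vec3{ v.x, -v.y, -v.z }; }

    int main()
    {
        Vec3 ovrUp     = { 0, 1, 0 };              // "up" in OVR coordinates
        Vec3 visionUp  = visionFromOvr(ovrUp);     // (0, -1, 0): Vision y+ is down
        Vec3 roundTrip = ovrFromVision(visionUp);  // back to (0, 1, 0)

        printf("vision:     (%g, %g, %g)\n", visionUp.x, visionUp.y, visionUp.z);
        printf("round trip: (%g, %g, %g)\n", roundTrip.x, roundTrip.y, roundTrip.z);
        return 0;
    }
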