This article collects typical usage examples of the C++ method OVR::Matrix4f::GetTranslation. If you have been wondering what Matrix4f::GetTranslation does, how to call it, or where to find usage examples, the hand-picked code samples below may help. You can also look into the containing class, OVR::Matrix4f, for further context.
Three code examples of the Matrix4f::GetTranslation method are shown below, sorted by popularity by default.
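As a quick orientation before the full examples: GetTranslation simply reads the translation column of a 4x4 row-major transform. A minimal sketch, assuming the OVR math headers from the Oculus PC SDK (the include path may differ in your setup):

#include "Extras/OVR_Math.h"  // OVR::Matrix4f, OVR::Vector3f; path may differ per SDK layout

// Minimal sketch: build a transform with a known translation and read it back.
OVR::Vector3f GetTranslationExample()
{
    OVR::Matrix4f transform = OVR::Matrix4f::Translation(OVR::Vector3f(1.0f, 2.0f, 3.0f));
    // GetTranslation returns the translation column (M[0][3], M[1][3], M[2][3]).
    return transform.GetTranslation();  // (1, 2, 3)
}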
Example 1:
OVR_PUBLIC_FUNCTION(ovrTrackerPose) ovr_GetTrackerPose(ovrSession session, unsigned int trackerPoseIndex)
{
    ovrTrackerPose pose = { 0 };

    // Get the device index for this tracker.
    vr::TrackedDeviceIndex_t trackers[vr::k_unMaxTrackedDeviceCount];
    g_VRSystem->GetSortedTrackedDeviceIndicesOfClass(vr::TrackedDeviceClass_TrackingReference, trackers, vr::k_unMaxTrackedDeviceCount);
    vr::TrackedDeviceIndex_t index = trackers[trackerPoseIndex];

    // Set the flags.
    pose.TrackerFlags = 0;
    if (session->poses[index].bDeviceIsConnected)
        pose.TrackerFlags |= ovrTracker_Connected;
    if (session->poses[index].bPoseIsValid)
        pose.TrackerFlags |= ovrTracker_PoseTracked;

    // Convert the pose.
    OVR::Matrix4f matrix;
    if (session->poses[index].bPoseIsValid)
        matrix = REV_HmdMatrixToOVRMatrix(session->poses[index].mDeviceToAbsoluteTracking);
    OVR::Quatf quat = OVR::Quatf(matrix);
    pose.Pose.Orientation = quat;
    pose.Pose.Position = matrix.GetTranslation();

    // Level the pose: keep only the yaw component of the orientation.
    // GetYawPitchRoll writes all three angles, so pass real variables even though only yaw is used.
    float yaw, pitch, roll;
    quat.GetYawPitchRoll(&yaw, &pitch, &roll);
    pose.LeveledPose.Orientation = OVR::Quatf(OVR::Axis_Y, yaw);
    pose.LeveledPose.Position = matrix.GetTranslation();

    return pose;
}
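Both this example and the ones below call REV_HmdMatrixToOVRMatrix, a helper that is not listed on this page. A plausible sketch of such a conversion (not necessarily the original implementation), assuming OpenVR's row-major 3x4 vr::HmdMatrix34_t and that OVR::Matrix4f default-constructs to the identity, so the missing fourth row is already 0 0 0 1:

#include <cstring>            // std::memcpy
#include <openvr.h>           // vr::HmdMatrix34_t
#include "Extras/OVR_Math.h"  // OVR::Matrix4f; path may differ per SDK layout

// Plausible sketch: copy the 3x4 row-major OpenVR matrix into the
// top three rows of a 4x4 matrix.
OVR::Matrix4f REV_HmdMatrixToOVRMatrix(vr::HmdMatrix34_t m)
{
    OVR::Matrix4f result;                     // default-constructed as identity
    std::memcpy(result.M, m.m, sizeof(m.m));  // overwrite rows 0..2; row 3 stays (0, 0, 0, 1)
    return result;
}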
Example 2:
OVR_PUBLIC_FUNCTION(ovrTrackingState) ovr_GetTrackingState(ovrSession session, double absTime, ovrBool latencyMarker)
{
    ovrTrackingState state = { 0 };
    if (!session)
        return state;

    // Get the current time; it is used to stamp the converted poses.
    float time = (float)ovr_GetTimeInSeconds();

    // Get the absolute tracking poses.
    vr::TrackedDevicePose_t* poses = session->poses;

    // Convert the head pose.
    state.HeadPose = REV_TrackedDevicePoseToOVRPose(poses[vr::k_unTrackedDeviceIndex_Hmd], time);
    state.StatusFlags = REV_TrackedDevicePoseToOVRStatusFlags(poses[vr::k_unTrackedDeviceIndex_Hmd]);

    // Convert the hand poses.
    vr::TrackedDeviceIndex_t hands[] = { g_VRSystem->GetTrackedDeviceIndexForControllerRole(vr::TrackedControllerRole_LeftHand),
        g_VRSystem->GetTrackedDeviceIndexForControllerRole(vr::TrackedControllerRole_RightHand) };
    for (int i = 0; i < ovrHand_Count; i++)
    {
        vr::TrackedDeviceIndex_t deviceIndex = hands[i];
        if (deviceIndex == vr::k_unTrackedDeviceIndexInvalid)
        {
            state.HandPoses[i].ThePose = OVR::Posef::Identity();
            continue;
        }

        state.HandPoses[i] = REV_TrackedDevicePoseToOVRPose(poses[deviceIndex], time);
        state.HandStatusFlags[i] = REV_TrackedDevicePoseToOVRStatusFlags(poses[deviceIndex]);
    }

    // The calibrated origin should be the location of the seated origin relative to the
    // absolute tracking space. OpenVR gives us the opposite (the location of the absolute
    // origin relative to the seated origin), so the matrix has to be inverted.
    OVR::Matrix4f origin = REV_HmdMatrixToOVRMatrix(g_VRSystem->GetSeatedZeroPoseToStandingAbsoluteTrackingPose());
    origin.Invert();
    state.CalibratedOrigin.Orientation = OVR::Quatf(origin);
    state.CalibratedOrigin.Position = origin.GetTranslation();

    return state;
}
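Example 2 also relies on REV_TrackedDevicePoseToOVRStatusFlags, which is likewise not listed on this page. A hedged sketch of what such a helper might look like; the exact mapping from OpenVR tracking results to LibOVR status bits is an assumption:

#include <openvr.h>    // vr::TrackedDevicePose_t
#include "OVR_CAPI.h"  // ovrStatus_* flags; path may differ per SDK layout

// Hypothetical sketch: derive LibOVR status flags from an OpenVR device pose.
unsigned int REV_TrackedDevicePoseToOVRStatusFlags(vr::TrackedDevicePose_t pose)
{
    unsigned int result = 0;
    if (pose.bPoseIsValid)
    {
        // A valid pose implies the orientation is being tracked.
        result |= ovrStatus_OrientationTracked;
        // Treat out-of-range results as losing positional tracking only.
        if (pose.eTrackingResult != vr::TrackingResult_Running_OutOfRange &&
            pose.eTrackingResult != vr::TrackingResult_Calibrating_OutOfRange)
            result |= ovrStatus_PositionTracked;
    }
    return result;
}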
Example 3: REV_TrackedDevicePoseToOVRPose
ovrPoseStatef REV_TrackedDevicePoseToOVRPose(vr::TrackedDevicePose_t pose, double time)
{
    ovrPoseStatef result = { 0 };
    result.ThePose = OVR::Posef::Identity();

    // Bail out early with the identity pose if OpenVR reports the pose as invalid.
    if (!pose.bPoseIsValid)
        return result;

    OVR::Matrix4f matrix = REV_HmdMatrixToOVRMatrix(pose.mDeviceToAbsoluteTracking);
    result.ThePose.Orientation = OVR::Quatf(matrix);
    result.ThePose.Position = matrix.GetTranslation();
    result.AngularVelocity = REV_HmdVectorToOVRVector(pose.vAngularVelocity);
    result.LinearVelocity = REV_HmdVectorToOVRVector(pose.vVelocity);

    // TODO: Calculate acceleration.
    result.AngularAcceleration = ovrVector3f();
    result.LinearAcceleration = ovrVector3f();

    result.TimeInSeconds = time;
    return result;
}
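Finally, the velocity conversion above goes through REV_HmdVectorToOVRVector, also not shown on this page. A minimal sketch, assuming a plain component-wise copy from vr::HmdVector3_t to ovrVector3f:

#include <openvr.h>    // vr::HmdVector3_t
#include "OVR_CAPI.h"  // ovrVector3f; path may differ per SDK layout

// Sketch: component-wise copy from an OpenVR vector to a LibOVR vector.
ovrVector3f REV_HmdVectorToOVRVector(vr::HmdVector3_t v)
{
    ovrVector3f result;
    result.x = v.v[0];
    result.y = v.v[1];
    result.z = v.v[2];
    return result;
}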