This article collects and summarizes typical usage examples of the C++ OVR::Matrix4f class. If you are wondering what Matrix4f is for, how it is used, or what real code that uses it looks like, the curated examples below may help.
The 15 code examples below are shown in order of popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better C++ examples.
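Before the examples, here is a minimal, self-contained sketch of the Matrix4f operations that recur below: composition by multiplication, transforming vectors, building a right-handed view matrix, and the transpose needed before handing a matrix to OpenGL. The header path is an assumption; older SDKs ship the math types in "Kernel/OVR_Math.h", newer ones in "Extras/OVR_Math.h".

#include "Kernel/OVR_Math.h" // assumed header path; adjust to your SDK version

void Matrix4fBasics()
{
    using namespace OVR;

    // Compose transforms by multiplication: translate up 1.8 m, then rotate about Y.
    Matrix4f m = Matrix4f::Translation(Vector3f(0.0f, 1.8f, 0.0f))
               * Matrix4f::RotationY(3.14159f * 0.5f);

    // Transform a point by the composed matrix.
    Vector3f eye = m.Transform(Vector3f(0.0f, 0.0f, 0.0f));

    // Build a right-handed view matrix, as most of the examples below do.
    Matrix4f view = Matrix4f::LookAtRH(eye, eye + Vector3f(0, 0, -1), Vector3f(0, 1, 0));

    // Matrix4f stores rows contiguously; OpenGL expects column-major data,
    // hence the Transposed() calls before every glUniformMatrix4fv below.
    const Matrix4f glReady = view.Transposed();
    (void)glReady;
}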
Example 1: AssembleViewMatrix
/// From the OVR SDK.
void OculusAppSkeleton::AssembleViewMatrix()
{
    // Rotate and position the m_oculusView camera, using yaw/pitch/roll in BodyFrame coordinates.
    OVR::Matrix4f rollPitchYaw = GetRollPitchYaw();
    OVR::Vector3f up = rollPitchYaw.Transform(UpVector);
    OVR::Vector3f forward = rollPitchYaw.Transform(ForwardVector);

    // Minimal head modeling.
    float headBaseToEyeHeight = 0.15f;     // Vertical height of eye above base of head
    float headBaseToEyeProtrusion = 0.09f; // Distance of eye forward of base of head

    OVR::Vector3f eyeCenterInHeadFrame(0.0f, headBaseToEyeHeight, -headBaseToEyeProtrusion);
    OVR::Vector3f shiftedEyePos = EyePos + rollPitchYaw.Transform(eyeCenterInHeadFrame);
    shiftedEyePos.y -= eyeCenterInHeadFrame.y; // Bring the head back down to its original height
    m_oculusView = OVR::Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + forward, up);

    // This is what the transformation would be without head modeling:
    // m_oculusView = Matrix4f::LookAtRH(EyePos, EyePos + forward, up);

    /// Set up a third-person (or otherwise) view for the control window.
    {
        OVR::Vector3f txFollowDisp = rollPitchYaw.Transform(FollowCamDisplacement);
        FollowCamPos = EyePos + txFollowDisp;
        m_controlView = OVR::Matrix4f::LookAtRH(FollowCamPos, EyePos, up);
    }
}
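GetRollPitchYaw() is not shown in this excerpt. In the OVR SDK samples this skeleton is based on, it is typically composed from the accumulated yaw/pitch/roll angles; a hypothetical reconstruction (EyeYaw, EyePitch, and EyeRoll are assumed member variables, as used in Example 15 below):

OVR::Matrix4f OculusAppSkeleton::GetRollPitchYaw() const
{
    // Yaw, then pitch, then roll, in body-frame coordinates.
    return OVR::Matrix4f::RotationY(EyeYaw)
         * OVR::Matrix4f::RotationX(EyePitch)
         * OVR::Matrix4f::RotationZ(EyeRoll);
}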
Example 2: _drawSceneMono
void RiftAppSkeleton::_drawSceneMono() const
{
    _resetGLState();
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    const int w = m_Cfg.OGL.Header.RTSize.w;
    const int h = m_Cfg.OGL.Header.RTSize.h;

    const glm::vec3 EyePos(m_chassisPos.x, m_chassisPos.y, m_chassisPos.z);
    const glm::vec3 LookVec(0.0f, 0.0f, -1.0f);
    const glm::vec3 up(0.0f, 1.0f, 0.0f);

    ovrPosef eyePose;
    eyePose.Orientation = OVR::Quatf();
    eyePose.Position = OVR::Vector3f();
    const OVR::Matrix4f view = _MakeModelviewMatrix(
        eyePose,
        OVR::Vector3f(0.0f),
        m_chassisYaw,
        m_chassisPos);

    const glm::mat4 persp = glm::perspective(
        90.0f,
        static_cast<float>(w) / static_cast<float>(h),
        0.004f,
        500.0f);

    ovrRecti rvp = {0, 0, w, h};
    _DrawScenes(&view.Transposed().M[0][0], glm::value_ptr(persp), rvp);
}
Example 3: RenderForOneEye
void OVRScene::RenderForOneEye(const float* pMview, const float* pPersp) const
{
    printf("Rendering in OVRScene!\n");
    if (m_bDraw == false)
        return;
    if (pMview == NULL)
        return;
    if (pPersp == NULL)
        return;

    const glm::mat4 modelview = glm::make_mat4(pMview);
    const glm::mat4 projection = glm::make_mat4(pPersp);

    // Assemble the modelview matrix to lock the camera in with real-world geometry:
    // we still have to use the assembled HMD stereo modelview matrices from OVRSDK05AppSkeleton,
    // but we undo the effects of chassis yaw and position so the frustum follows the viewer.
    if (m_pHmd != NULL)
    {
        const ovrTrackingState ts = ovrHmd_GetTrackingState(m_pHmd, ovr_GetTimeInSeconds());
        const ovrPosef& cp = ts.CameraPose;

        OVR::Matrix4f camMtx = OVR::Matrix4f();
        camMtx *= OVR::Matrix4f::Translation(cp.Position)
            * OVR::Matrix4f(OVR::Quatf(cp.Orientation));

        glm::mat4 ogmat = glm::make_mat4(&camMtx.Transposed().M[0][0]);
        DrawScene(modelview * ogmat, projection);
    }
}
Example 4: ovr_GetTrackerPose
OVR_PUBLIC_FUNCTION(ovrTrackerPose) ovr_GetTrackerPose(ovrSession session, unsigned int trackerPoseIndex)
{
    ovrTrackerPose pose = { 0 };

    // Get the index for this tracker.
    vr::TrackedDeviceIndex_t trackers[vr::k_unMaxTrackedDeviceCount];
    g_VRSystem->GetSortedTrackedDeviceIndicesOfClass(vr::TrackedDeviceClass_TrackingReference, trackers, vr::k_unMaxTrackedDeviceCount);
    vr::TrackedDeviceIndex_t index = trackers[trackerPoseIndex];

    // Set the flags.
    pose.TrackerFlags = 0;
    if (session->poses[index].bDeviceIsConnected)
        pose.TrackerFlags |= ovrTracker_Connected;
    if (session->poses[index].bPoseIsValid)
        pose.TrackerFlags |= ovrTracker_PoseTracked;

    // Convert the pose.
    OVR::Matrix4f matrix;
    if (session->poses[index].bPoseIsValid)
        matrix = REV_HmdMatrixToOVRMatrix(session->poses[index].mDeviceToAbsoluteTracking);
    OVR::Quatf quat = OVR::Quatf(matrix);
    pose.Pose.Orientation = quat;
    pose.Pose.Position = matrix.GetTranslation();

    // Level the pose.
    float yaw;
    quat.GetYawPitchRoll(&yaw, nullptr, nullptr);
    pose.LeveledPose.Orientation = OVR::Quatf(OVR::Axis_Y, yaw);
    pose.LeveledPose.Position = matrix.GetTranslation();

    return pose;
}
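The leveled pose keeps only the yaw component of the tracker orientation. Equivalently, the yaw can be extracted with the GetEulerAngles template that Examples 6 and 15 use (a small sketch; quat is the variable from the example above):

float yaw, pitch, roll;
quat.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&yaw, &pitch, &roll);
OVR::Quatf leveled(OVR::Axis_Y, yaw); // rotation about the vertical axis only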
Example 5: getViewMatrix
OVR::Matrix4f vx_ovr_namespace_::OVRHMDHandleWithDevice::getViewMatrix(ovrEyeType eye, float pos_x, float pos_y, float pos_z, float yaw) const
{
    auto height = ovr_GetFloat(session_, OVR_KEY_EYE_HEIGHT, 1.8f);

    OVR::Matrix4f rollPitchYaw = OVR::Matrix4f::RotationY(yaw);
    OVR::Matrix4f finalRollPitchYaw = rollPitchYaw * OVR::Matrix4f(eyeRenderPosef_[eye].Orientation);
    OVR::Vector3f finalUp = finalRollPitchYaw.Transform(OVR::Vector3f(0.0f, 1.0f, 0.0f));
    OVR::Vector3f finalForward = finalRollPitchYaw.Transform(OVR::Vector3f(0.0f, 0.0f, -1.0f));
    OVR::Vector3f shiftedEyePos = OVR::Vector3f(pos_x, pos_y + height, pos_z) + rollPitchYaw.Transform(eyeRenderPosef_[eye].Position);

    return OVR::Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
}
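A hypothetical caller, rendering one view per eye (handle, playerX/Y/Z, and playerYaw are illustrative names, not from the source):

for (int i = 0; i < 2; ++i)
{
    const ovrEyeType eye = (i == 0) ? ovrEye_Left : ovrEye_Right;
    const OVR::Matrix4f view = handle.getViewMatrix(eye, playerX, playerY, playerZ, playerYaw);
    // Transpose before handing the row-major matrix to OpenGL.
    const OVR::Matrix4f glView = view.Transposed();
    // ... bind the per-eye framebuffer, upload glView.M, draw ...
}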
Example 6: getOrientation
glm::mat4 CameraOvr::getOrientation(OVR::Quatf orientationQuat, const OVR::Util::Render::StereoEyeParams& eyeParams) {
    orientationQuat.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&_hmdRy, &_hmdRx, &_hmdRz);

    OVR::Matrix4f orientation = OVR::Matrix4f::RotationY(_hmdRy + _ry)
        * OVR::Matrix4f::RotationX(_hmdRx + _rx)
        * OVR::Matrix4f::RotationZ(_hmdRz + _rz);
    OVR::Matrix4f view = orientation.Inverted() * eyeParams.ViewAdjust;

    return ovrToGlmMat4(view);
}
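ovrToGlmMat4() is not defined in this excerpt. Since OVR::Matrix4f is row-major and glm::mat4 is column-major, a plausible implementation is a transpose on copy (a sketch, assuming glm's type_ptr header):

#include <glm/gtc/type_ptr.hpp>

glm::mat4 ovrToGlmMat4(const OVR::Matrix4f& m)
{
    // Transposing converts OVR's row-major layout to glm's column-major layout.
    return glm::make_mat4(&m.Transposed().M[0][0]);
}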
Example 7: _StoreHmdPose
// Store HMD position and direction for gaze tracking in timestep.
// The OVR SDK requires that the head pose be queried between ovrHmd_BeginFrameTiming and ovrHmd_EndFrameTiming.
void RiftAppSkeleton::_StoreHmdPose(const ovrPosef& eyePose)
{
    m_hmdRo.x = eyePose.Position.x + m_chassisPos.x;
    m_hmdRo.y = eyePose.Position.y + m_chassisPos.y;
    m_hmdRo.z = eyePose.Position.z + m_chassisPos.z;

    const OVR::Matrix4f rotmtx = OVR::Matrix4f::RotationY(-m_chassisYaw) // Not sure why negative...
        * OVR::Matrix4f(eyePose.Orientation);
    // w = 0 transforms a direction: translation is ignored, only rotation applies.
    const OVR::Vector4f rotvec = rotmtx.Transform(OVR::Vector4f(0.0f, 0.0f, -1.0f, 0.0f));
    m_hmdRd.x = rotvec.x;
    m_hmdRd.y = rotvec.y;
    m_hmdRd.z = rotvec.z;
}
Example 8: RenderThumbnails
void RiftAppSkeleton::RenderThumbnails()
{
    std::vector<Pane*>& panes = m_paneScene.m_panes;
    for (std::vector<Pane*>::iterator it = panes.begin(); it != panes.end(); ++it)
    {
        ShaderPane* pP = reinterpret_cast<ShaderPane*>(*it);
        if (pP == NULL)
            continue;
        ShaderToy* pSt = pP->m_pShadertoy;

        // Render a view of the shader to the FBO.
        // We must save the previously bound FBO and restore it afterwards.
        GLint bound_fbo = 0;
        glGetIntegerv(GL_FRAMEBUFFER_BINDING, &bound_fbo);
        bindFBO(pP->m_paneRenderBuffer);

        //pP->DrawToFBO();
        {
            const glm::vec3 hp = pSt->GetHeadPos();
            const glm::vec3 LookVec(0.0f, 0.0f, -1.0f);
            const glm::vec3 up(0.0f, 1.0f, 0.0f);

            ovrPosef eyePose;
            eyePose.Orientation = OVR::Quatf();
            eyePose.Position = OVR::Vector3f();
            const OVR::Matrix4f view = _MakeModelviewMatrix(
                eyePose,
                OVR::Vector3f(0.0f),
                static_cast<float>(M_PI),
                OVR::Vector3f(hp.x, hp.y, hp.z));

            const glm::mat4 persp = glm::perspective(
                90.0f,
                static_cast<float>(pP->m_paneRenderBuffer.w) / static_cast<float>(pP->m_paneRenderBuffer.h),
                0.004f,
                500.0f);

            const bool wasDrawing = m_shaderToyScene.m_bDraw;
            m_shaderToyScene.m_bDraw = true;
            m_shaderToyScene.SetShaderToy(pSt);
            m_shaderToyScene.RenderForOneEye(&view.Transposed().M[0][0], glm::value_ptr(persp));
            m_shaderToyScene.m_bDraw = wasDrawing;
            m_shaderToyScene.SetShaderToy(NULL);
        }
        unbindFBO();

        glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, bound_fbo);
    }
}
Example 9: Entity_DrawChildren
void Entity_DrawChildren( const OVR::Matrix4f &view, const SxTransform& xform, SRef first )
{
    SRef ref;
    SEntity *entity;
    SxTransform entityXform;
    SxTransform childXform;
    OVR::Matrix4f m;

    for ( ref = first; ref != S_NULL_REF; ref = entity->parentLink.next )
    {
        entity = Registry_GetEntity( ref );
        assert( entity );

        if ( entity->visibility <= 0.0f )
            continue;

        OrientationToTransform( entity->orientation, &entityXform );
        ConcatenateTransforms( xform, entityXform, &childXform );

        // if ( strstr( entity->id, "vnc" ) )
        // {
        //     S_Log( "entityXform %s:", entity->id );
        //     S_Log( "xAxis: %f %f %f", entityXform.axes.x.x, entityXform.axes.x.y, entityXform.axes.x.z );
        //     S_Log( "yAxis: %f %f %f", entityXform.axes.y.x, entityXform.axes.y.y, entityXform.axes.y.z );
        //     S_Log( "zAxis: %f %f %f", entityXform.axes.z.x, entityXform.axes.z.y, entityXform.axes.z.z );
        //     S_Log( "origin: %f %f %f", entityXform.origin.x, entityXform.origin.y, entityXform.origin.z );
        //     S_Log( "scale: %f %f %f", entityXform.scale.x, entityXform.scale.y, entityXform.scale.z );
        //     S_Log( "childXform %s:", entity->id );
        //     S_Log( "xAxis: %f %f %f", childXform.axes.x.x, childXform.axes.x.y, childXform.axes.x.z );
        //     S_Log( "yAxis: %f %f %f", childXform.axes.y.x, childXform.axes.y.y, childXform.axes.y.z );
        //     S_Log( "zAxis: %f %f %f", childXform.axes.z.x, childXform.axes.z.y, childXform.axes.z.z );
        //     S_Log( "origin: %f %f %f", childXform.origin.x, childXform.origin.y, childXform.origin.z );
        //     S_Log( "scale: %f %f %f", childXform.scale.x, childXform.scale.y, childXform.scale.z );
        // }

        // The constructor arguments fill rows with the scaled basis axes and origin;
        // Transposed() below converts to the convention Entity_DrawEntity expects.
        m = OVR::Matrix4f(
            childXform.axes.x.x * childXform.scale.x, childXform.axes.x.y * childXform.scale.x, childXform.axes.x.z * childXform.scale.x, 0.0f,
            childXform.axes.y.x * childXform.scale.y, childXform.axes.y.y * childXform.scale.y, childXform.axes.y.z * childXform.scale.y, 0.0f,
            childXform.axes.z.x * childXform.scale.z, childXform.axes.z.y * childXform.scale.z, childXform.axes.z.z * childXform.scale.z, 0.0f,
            childXform.origin.x, childXform.origin.y, childXform.origin.z, 1.0f );

        Entity_DrawEntity( entity, view * m.Transposed() );

        if ( entity->firstChild != S_NULL_REF )
        {
            // S_Log( "%s has children", entity->id );
            Entity_DrawChildren( view, childXform, entity->firstChild );
        }
    }
}
Example 10: _initPresentFbo
void RiftAppSkeleton::_initPresentFbo()
{
    m_presentFbo.bindVAO();

    const float verts[] = {
        -1, -1,
         1, -1,
         1,  1,
        -1,  1
    };
    const float texs[] = {
        0, 0,
        1, 0,
        1, 1,
        0, 1,
    };

    GLuint vertVbo = 0;
    glGenBuffers(1, &vertVbo);
    m_presentFbo.AddVbo("vPosition", vertVbo);
    glBindBuffer(GL_ARRAY_BUFFER, vertVbo);
    glBufferData(GL_ARRAY_BUFFER, 4*2*sizeof(GLfloat), verts, GL_STATIC_DRAW);
    glVertexAttribPointer(m_presentFbo.GetAttrLoc("vPosition"), 2, GL_FLOAT, GL_FALSE, 0, NULL);

    GLuint texVbo = 0;
    glGenBuffers(1, &texVbo);
    m_presentFbo.AddVbo("vTex", texVbo);
    glBindBuffer(GL_ARRAY_BUFFER, texVbo);
    glBufferData(GL_ARRAY_BUFFER, 4*2*sizeof(GLfloat), texs, GL_STATIC_DRAW);
    glVertexAttribPointer(m_presentFbo.GetAttrLoc("vTex"), 2, GL_FLOAT, GL_FALSE, 0, NULL);

    glEnableVertexAttribArray(m_presentFbo.GetAttrLoc("vPosition"));
    glEnableVertexAttribArray(m_presentFbo.GetAttrLoc("vTex"));

    glUseProgram(m_presentFbo.prog());
    {
        // Upload identity modelview/projection (Transposed() is a no-op on the identity).
        OVR::Matrix4f id = OVR::Matrix4f::Identity();
        glUniformMatrix4fv(m_presentFbo.GetUniLoc("mvmtx"), 1, false, &id.Transposed().M[0][0]);
        glUniformMatrix4fv(m_presentFbo.GetUniLoc("prmtx"), 1, false, &id.Transposed().M[0][0]);
    }
    glUseProgram(0);

    glBindVertexArray(0);
}
Example 11: ovr_GetTrackingState
OVR_PUBLIC_FUNCTION(ovrTrackingState) ovr_GetTrackingState(ovrSession session, double absTime, ovrBool latencyMarker)
{
    ovrTrackingState state = { 0 };

    if (!session)
        return state;

    // Gain focus for the compositor
    float time = (float)ovr_GetTimeInSeconds();

    // Get the absolute tracking poses
    vr::TrackedDevicePose_t* poses = session->poses;

    // Convert the head pose
    state.HeadPose = REV_TrackedDevicePoseToOVRPose(poses[vr::k_unTrackedDeviceIndex_Hmd], time);
    state.StatusFlags = REV_TrackedDevicePoseToOVRStatusFlags(poses[vr::k_unTrackedDeviceIndex_Hmd]);

    // Convert the hand poses
    vr::TrackedDeviceIndex_t hands[] = { g_VRSystem->GetTrackedDeviceIndexForControllerRole(vr::TrackedControllerRole_LeftHand),
                                         g_VRSystem->GetTrackedDeviceIndexForControllerRole(vr::TrackedControllerRole_RightHand) };
    for (int i = 0; i < ovrHand_Count; i++)
    {
        vr::TrackedDeviceIndex_t deviceIndex = hands[i];
        if (deviceIndex == vr::k_unTrackedDeviceIndexInvalid)
        {
            state.HandPoses[i].ThePose = OVR::Posef::Identity();
            continue;
        }

        state.HandPoses[i] = REV_TrackedDevicePoseToOVRPose(poses[deviceIndex], time);
        state.HandStatusFlags[i] = REV_TrackedDevicePoseToOVRStatusFlags(poses[deviceIndex]);
    }

    OVR::Matrix4f origin = REV_HmdMatrixToOVRMatrix(g_VRSystem->GetSeatedZeroPoseToStandingAbsoluteTrackingPose());

    // The calibrated origin should be the location of the seated origin relative to the absolute tracking space.
    // It currently describes the location of the absolute origin relative to the seated origin, so we have to invert it.
    origin.Invert();

    state.CalibratedOrigin.Orientation = OVR::Quatf(origin);
    state.CalibratedOrigin.Position = origin.GetTranslation();

    return state;
}
Example 12: _MakeModelviewMatrix
/// Scale the parallax translation and head-pose motion vector by the head size
/// dictated by the shader. Thanks to the elegant design decision of putting the
/// head's default position at the origin, this is simple.
OVR::Matrix4f _MakeModelviewMatrix(
    ovrPosef eyePose,
    ovrVector3f viewAdjust,
    float chassisYaw,
    ovrVector3f chassisPos,
    float headScale = 1.0f)
{
    const OVR::Matrix4f eyePoseMatrix =
        OVR::Matrix4f::Translation(OVR::Vector3f(eyePose.Position) * headScale)
        * OVR::Matrix4f(OVR::Quatf(eyePose.Orientation));

    const OVR::Matrix4f view =
        OVR::Matrix4f::Translation(OVR::Vector3f(viewAdjust) * headScale)
        * eyePoseMatrix.Inverted()
        * OVR::Matrix4f::RotationY(chassisYaw)
        * OVR::Matrix4f::Translation(-OVR::Vector3f(chassisPos));

    return view;
}
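Examples 2 and 8 above call this helper with an identity eye pose and a zero view adjust; in a stereo render loop the per-eye values from the SDK would be passed instead. A sketch under those assumptions (outEyePoses, eyeRenderDesc, headScale, and mvLoc are illustrative names, and the ViewAdjust field name is assumed from the SDK version used here):

const ovrPosef eyePose = outEyePoses[eye];
const OVR::Matrix4f view = _MakeModelviewMatrix(
    eyePose,
    eyeRenderDesc[eye].ViewAdjust, // per-eye translation; field name assumed
    m_chassisYaw,
    m_chassisPos,
    headScale);
// As elsewhere, transpose the row-major matrix before uploading to OpenGL.
glUniformMatrix4fv(mvLoc, 1, GL_FALSE, &view.Transposed().M[0][0]);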
Example 13: CalcTransMatrix
// This function calculates the transformation matrix needed for the Oculus Rift display.
glm::mat4 RetinaManager::CalcTransMatrix(ovrEyeType Eye) {
    glm::mat4 projMat;
    glm::mat4 modelViewMat;

    // Get the projection matrix from the device.
    OVR::Matrix4f projectionMatrix = ovrMatrix4f_Projection(this->eyeRenderDesc[Eye].Fov, 0.3f, 1000.0f, true);

    // Convert the matrix into OpenGL (column-major) form.
    memcpy(glm::value_ptr(projMat), &(projectionMatrix.Transposed().M[0][0]), sizeof(projectionMatrix));

    modelViewMat = glm::mat4(1.0); // Identity matrix for model-view

    // Adjust the IPD and the distance from the FOV.
    glm::mat4 translateIPD = glm::translate(glm::mat4(1.0),
            glm::vec3(this->eyeRenderDesc[Eye].ViewAdjust.x, this->eyeRenderDesc[Eye].ViewAdjust.y,
                      this->eyeRenderDesc[Eye].ViewAdjust.z));
    glm::mat4 translateBack = glm::translate(glm::mat4(1.0),
            glm::vec3(0, 0, this->paramManager.getTranslateBackOffset()));

    // Compute and return the combined transformation matrix.
    return projMat * modelViewMat * translateBack * translateIPD;
}
Example 14: REV_TrackedDevicePoseToOVRPose
ovrPoseStatef REV_TrackedDevicePoseToOVRPose(vr::TrackedDevicePose_t pose, double time)
{
    ovrPoseStatef result = { 0 };
    result.ThePose = OVR::Posef::Identity();

    // Return the identity pose if the device pose is invalid.
    if (!pose.bPoseIsValid)
        return result;
    OVR::Matrix4f matrix = REV_HmdMatrixToOVRMatrix(pose.mDeviceToAbsoluteTracking);

    result.ThePose.Orientation = OVR::Quatf(matrix);
    result.ThePose.Position = matrix.GetTranslation();
    result.AngularVelocity = REV_HmdVectorToOVRVector(pose.vAngularVelocity);
    result.LinearVelocity = REV_HmdVectorToOVRVector(pose.vVelocity);

    // TODO: Calculate acceleration.
    result.AngularAcceleration = ovrVector3f();
    result.LinearAcceleration = ovrVector3f();

    result.TimeInSeconds = time;
    return result;
}
Example 15: AccumulateInputs
/// Handle input's influence on orientation variables.
void OculusAppSkeleton::AccumulateInputs(float dt)
{
    // Handle sensor motion.
    // We extract yaw, pitch, roll instead of directly using the orientation
    // to allow "additional" yaw manipulation with mouse/controller.
    if (m_ok.SensorActive())
    {
        OVR::Quatf hmdOrient = m_ok.GetOrientation();
        float yaw = 0.0f;
        hmdOrient.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&yaw, &EyePitch, &EyeRoll);
        EyeYaw += (yaw - LastSensorYaw);
        LastSensorYaw = yaw;
    }

    // Gamepad rotation.
    EyeYaw -= GamepadRotate.x * dt;

    if (!m_ok.SensorActive())
    {
        // Allow the gamepad to look up/down, but only if there is no Rift sensor.
        EyePitch -= GamepadRotate.y * dt;
        EyePitch -= MouseRotate.y * dt;
        EyeYaw -= MouseRotate.x * dt;

        const float maxPitch = ((3.1415f / 2) * 0.98f);
        if (EyePitch > maxPitch)
            EyePitch = maxPitch;
        if (EyePitch < -maxPitch)
            EyePitch = -maxPitch;
    }

    if (GamepadMove.LengthSq() > 0)
    {
        OVR::Matrix4f yawRotate = OVR::Matrix4f::RotationY(EyeYaw);
        OVR::Vector3f orientationVector = yawRotate.Transform(GamepadMove);
        orientationVector *= MoveSpeed * dt;
        EyePos += orientationVector;
    }
    if (MouseMove.LengthSq() > 0)
    {
        OVR::Matrix4f yawRotate = OVR::Matrix4f::RotationY(EyeYaw);
        OVR::Vector3f orientationVector = yawRotate.Transform(MouseMove);
        orientationVector *= MoveSpeed * dt;
        EyePos += orientationVector;
    }
    if (KeyboardMove.LengthSq() > 0)
    {
        OVR::Matrix4f yawRotate = OVR::Matrix4f::RotationY(EyeYaw);
        OVR::Vector3f orientationVector = yawRotate.Transform(KeyboardMove);
        orientationVector *= MoveSpeed * dt;
        EyePos += orientationVector;
    }
}
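The three movement blocks above are identical except for the input vector; a possible refactor (a sketch, not from the source) rotates any move vector into the current yaw frame once:

void OculusAppSkeleton::AccumulateMove(const OVR::Vector3f& move, float dt)
{
    if (move.LengthSq() <= 0.0f)
        return;
    // Rotate the input vector into the yaw frame, scale by speed, and accumulate.
    const OVR::Matrix4f yawRotate = OVR::Matrix4f::RotationY(EyeYaw);
    EyePos += yawRotate.Transform(move) * (MoveSpeed * dt);
}

// The end of AccumulateInputs would then reduce to:
// AccumulateMove(GamepadMove, dt);
// AccumulateMove(MouseMove, dt);
// AccumulateMove(KeyboardMove, dt);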