This article collects typical usage examples of the C++ method MyAvatar::getSensorToWorldMatrix. If you are wondering what MyAvatar::getSensorToWorldMatrix does, how it is called, or what real-world usage looks like, the hand-picked examples below should help. You can also look further into other usage examples of the containing class, MyAvatar.
The following shows 6 code examples of MyAvatar::getSensorToWorldMatrix, sorted by popularity by default.
Example 1: update
void OverlayConductor::update(float dt) {
    updateMode();

    switch (_mode) {
    case SITTING: {
        // when sitting, the overlay is at the origin, facing down the -z axis.
        // the camera is taken directly from the HMD.
        Transform identity;
        qApp->getApplicationCompositor().setModelTransform(identity);
        qApp->getApplicationCompositor().setCameraBaseTransform(identity);
        break;
    }
    case STANDING: {
        // when standing, the overlay is at a reference position, which is set when the overlay is
        // enabled. The camera is taken directly from the HMD, but in world space.
        // So the sensorToWorldMatrix must be applied.
        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        Transform t;
        t.evalFromRawMatrix(myAvatar->getSensorToWorldMatrix());
        qApp->getApplicationCompositor().setCameraBaseTransform(t);

        // detect when the head moves outside of the sweet spot, or looks away.
        mat4 headMat = myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose();
        vec3 headWorldPos = extractTranslation(headMat);
        vec3 headForward = glm::quat_cast(headMat) * glm::vec3(0.0f, 0.0f, -1.0f);
        Transform modelXform = qApp->getApplicationCompositor().getModelTransform();
        vec3 compositorWorldPos = modelXform.getTranslation();
        vec3 compositorForward = modelXform.getRotation() * glm::vec3(0.0f, 0.0f, -1.0f);
        const float MAX_COMPOSITOR_DISTANCE = 0.6f;
        const float MAX_COMPOSITOR_ANGLE = 110.0f;
        if (_enabled && (glm::distance(headWorldPos, compositorWorldPos) > MAX_COMPOSITOR_DISTANCE ||
                         glm::dot(headForward, compositorForward) < cosf(glm::radians(MAX_COMPOSITOR_ANGLE)))) {
            // fade out the overlay
            setEnabled(false);
        }
        break;
    }
    case FLAT:
        // do nothing
        break;
    }
}
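The STANDING branch above shows the core pattern behind getSensorToWorldMatrix: a pose reported in HMD sensor space is left-multiplied by the avatar's sensor-to-world matrix to obtain a world-space pose, from which the translation and forward vector can be read off. A minimal sketch of that composition in plain glm (the helper names below are illustrative and not part of the engine):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Bring an HMD pose from sensor space into world space and read off its translation.
glm::vec3 headWorldPosition(const glm::mat4& sensorToWorld, const glm::mat4& hmdSensorPose) {
    glm::mat4 headWorldMat = sensorToWorld * hmdSensorPose; // sensor -> world
    return glm::vec3(headWorldMat[3]);                      // translation is the fourth column
}

// Same composition, but extract the head's forward direction (-z axis) in world space.
glm::vec3 headWorldForward(const glm::mat4& sensorToWorld, const glm::mat4& hmdSensorPose) {
    glm::mat4 headWorldMat = sensorToWorld * hmdSensorPose;
    return glm::quat_cast(headWorldMat) * glm::vec3(0.0f, 0.0f, -1.0f);
}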
Example 2: getCameraOrientation
glm::quat Head::getCameraOrientation() const {
    // NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
    // you may wonder why this code is here. This method is called while in Oculus mode to determine how to
    // change the driving direction, i.e. to support driving toward where your head is looking. Note that in
    // Oculus mode, your actual camera view and where your head is looking are not always the same.
    if (qApp->getAvatarUpdater()->isHMDMode()) {
        MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
        if (myAvatar && myAvatar->getStandingHMDSensorMode()) {
            return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
        } else {
            return getOrientation();
        }
    } else {
        Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
        return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
    }
}
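When only an orientation is needed, as in the standing-HMD branch above, the rotation part of the sensor-to-world matrix is extracted with glm::quat_cast and composed with the sensor-space orientation. A hedged sketch of just that step in plain glm (assuming the matrix carries no shear or non-uniform scale):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Rotate a sensor-space orientation into world space using only the rotational part
// of the sensor-to-world matrix. quat_cast discards the translation.
glm::quat sensorToWorldOrientation(const glm::mat4& sensorToWorld, const glm::quat& sensorOrientation) {
    return glm::quat_cast(sensorToWorld) * sensorOrientation;
}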
Example 3: setEnabled
void OverlayConductor::setEnabled(bool enabled) {
    if (enabled == _enabled) {
        return;
    }

    Menu::getInstance()->setIsOptionChecked(MenuOption::Overlays, enabled);

    _enabled = enabled; // set the new value

    // if the new state is visible/enabled...
    if (_enabled) {
        // alpha fadeIn the overlay mesh.
        qApp->getApplicationCompositor().fadeIn();

        // enable mouse clicks from script
        qApp->getOverlays().enable();

        // enable QML events
        auto offscreenUi = DependencyManager::get<OffscreenUi>();
        offscreenUi->getRootItem()->setEnabled(true);

        if (_mode == STANDING) {
            // place the overlay at the current hmd position in world space
            MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
            auto camMat = cancelOutRollAndPitch(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
            Transform t;
            t.setTranslation(extractTranslation(camMat));
            t.setRotation(glm::quat_cast(camMat));
            qApp->getApplicationCompositor().setModelTransform(t);
        }
    } else { // otherwise, if the new state is hidden/not enabled
        // alpha fadeOut the overlay mesh.
        qApp->getApplicationCompositor().fadeOut();

        // disable mouse clicks from script
        qApp->getOverlays().disable();

        // disable QML events
        auto offscreenUi = DependencyManager::get<OffscreenUi>();
        offscreenUi->getRootItem()->setEnabled(false);
    }
}
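Examples 3 and 4 both pass the world-space HMD pose through cancelOutRollAndPitch() before using it as the overlay's model transform, so the overlay ends up level in front of the user. The engine's own helper is not shown here; the following is only a rough sketch of what such a yaw-only reduction could look like in plain glm, not the engine implementation:

#include <glm/glm.hpp>

// Keep the translation of a pose, but rebuild its rotation from the forward vector
// projected onto the horizontal plane, so only yaw survives (roll and pitch are cancelled).
glm::mat4 yawOnlyPose(const glm::mat4& pose) {
    glm::vec3 forward = -glm::vec3(pose[2]);           // third column is local +z, so forward is its negation
    forward.y = 0.0f;                                  // project onto the horizontal (XZ) plane
    if (glm::length(forward) < 1.0e-6f) {
        forward = glm::vec3(0.0f, 0.0f, -1.0f);        // degenerate case: looking straight up or down
    }
    forward = glm::normalize(forward);

    const glm::vec3 up(0.0f, 1.0f, 0.0f);
    glm::vec3 right = glm::normalize(glm::cross(forward, up));

    glm::mat4 result(1.0f);
    result[0] = glm::vec4(right, 0.0f);
    result[1] = glm::vec4(up, 0.0f);
    result[2] = glm::vec4(-forward, 0.0f);             // +z column points backward
    result[3] = pose[3];                               // keep the original translation
    return result;
}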
Example 4: updateMode
void OverlayConductor::updateMode() {
    Mode newMode;
    if (qApp->isHMDMode()) {
        newMode = SITTING;
    } else {
        newMode = FLAT;
    }

    if (newMode != _mode) {
        switch (newMode) {
        case SITTING: {
            // enter the SITTING state
            // place the overlay at origin
            Transform identity;
            qApp->getApplicationCompositor().setModelTransform(identity);
            break;
        }
        case STANDING: {
            // enter the STANDING state
            // place the overlay at the current hmd position in world space
            MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
            auto camMat = cancelOutRollAndPitch(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
            Transform t;
            t.setTranslation(extractTranslation(camMat));
            t.setRotation(glm::quat_cast(camMat));
            qApp->getApplicationCompositor().setModelTransform(t);
            break;
        }
        case FLAT:
            // do nothing
            break;
        }
    }

    _mode = newMode;
}
Example 5:
glm::mat4 HMDScriptingInterface::getWorldHMDMatrix() const {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    return myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
}
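Example 5 simply chains the two matrices and returns the result to callers such as the scripting layer. A hypothetical caller, not part of the source, might split that matrix back into a world-space position and orientation like this:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Hypothetical helper that decomposes a world-space HMD matrix (as returned by
// getWorldHMDMatrix()) into a position and an orientation.
struct WorldHMDPose {
    glm::vec3 position;
    glm::quat orientation;
};

WorldHMDPose decomposeWorldHMDMatrix(const glm::mat4& worldHMDMat) {
    return { glm::vec3(worldHMDMat[3]), glm::quat_cast(worldHMDMat) };
}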
Example 6: updateRig
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    const FBXGeometry& geometry = getFBXGeometry();

    Head* head = _owningAvatar->getHead();

    if (_owningAvatar->isMyAvatar()) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

        Rig::HeadParameters headParams;
        headParams.enableLean = qApp->isHMDMode();
        headParams.leanSideways = head->getFinalLeanSideways();
        headParams.leanForward = head->getFinalLeanForward();
        headParams.torsoTwist = head->getTorsoTwist();

        if (qApp->isHMDMode()) {
            headParams.isInHMD = true;

            // get HMD position from sensor space into world space, and back into rig space
            glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
            glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
            glm::mat4 worldToRig = glm::inverse(rigToWorld);
            glm::mat4 rigHMDMat = worldToRig * worldHMDMat;

            headParams.rigHeadPosition = extractTranslation(rigHMDMat);
            headParams.rigHeadOrientation = extractRotation(rigHMDMat);
            headParams.worldHeadOrientation = extractRotation(worldHMDMat);
        } else {
            headParams.isInHMD = false;

            // We don't have a valid localHeadPosition.
            headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame();
            headParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
        }

        headParams.leanJointIndex = geometry.leanJointIndex;
        headParams.neckJointIndex = geometry.neckJointIndex;
        headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;

        _rig->updateFromHeadParameters(headParams, deltaTime);

        Rig::HandParameters handParams;

        auto leftPose = myAvatar->getLeftHandControllerPoseInAvatarFrame();
        if (leftPose.isValid()) {
            handParams.isLeftEnabled = true;
            handParams.leftPosition = Quaternions::Y_180 * leftPose.getTranslation();
            handParams.leftOrientation = Quaternions::Y_180 * leftPose.getRotation();
        } else {
            handParams.isLeftEnabled = false;
        }

        auto rightPose = myAvatar->getRightHandControllerPoseInAvatarFrame();
        if (rightPose.isValid()) {
            handParams.isRightEnabled = true;
            handParams.rightPosition = Quaternions::Y_180 * rightPose.getTranslation();
            handParams.rightOrientation = Quaternions::Y_180 * rightPose.getRotation();
        } else {
            handParams.isRightEnabled = false;
        }

        handParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();
        handParams.bodyCapsuleHalfHeight = myAvatar->getCharacterController()->getCapsuleHalfHeight();
        handParams.bodyCapsuleLocalOffset = myAvatar->getCharacterController()->getCapsuleLocalOffset();

        _rig->updateFromHandParameters(handParams, deltaTime);

        Rig::CharacterControllerState ccState = convertCharacterControllerState(myAvatar->getCharacterController()->getState());

        auto velocity = myAvatar->getLocalVelocity();
        auto position = myAvatar->getLocalPosition();
        auto orientation = myAvatar->getLocalOrientation();
        _rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);

        // evaluate AnimGraph animation and update jointStates.
        Model::updateRig(deltaTime, parentTransform);

        Rig::EyeParameters eyeParams;
        eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
        eyeParams.eyeLookAt = head->getLookAtPosition();
        eyeParams.eyeSaccade = head->getSaccade();
        eyeParams.modelRotation = getRotation();
        eyeParams.modelTranslation = getTranslation();
        eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
        eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

        _rig->updateFromEyeParameters(eyeParams);
    } else {
        Model::updateRig(deltaTime, parentTransform);

        // This is a little more work than we really want.
        //
        // Other avatars' joints, including their eyes, should already be set just like any other joints
        // from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
        //
        // Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
        // However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
        // We will revisit that as priorities allow, and particularly after the new rig/animation/joints.
//......... part of the code is omitted here .........
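Even though the rest of Example 6 is omitted, its HMD branch shows the full frame chain: the head pose goes from HMD sensor space to world space via getSensorToWorldMatrix(), and then into the rig (model) frame via the inverse of the rig-to-world transform. A compact sketch of that chain in plain glm (the engine uses its own helpers such as createMatFromQuatAndPos; this version is an illustrative stand-in):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>

// Map an HMD pose from sensor space into the rig/model frame:
// sensor space -> world space (sensorToWorld) -> rig space (inverse of rig-to-world).
glm::mat4 hmdSensorToRig(const glm::mat4& sensorToWorld,
                         const glm::quat& modelRotation,
                         const glm::vec3& modelTranslation,
                         const glm::mat4& hmdSensorMat) {
    glm::mat4 rigToWorld = glm::translate(glm::mat4(1.0f), modelTranslation) * glm::mat4_cast(modelRotation);
    glm::mat4 worldToRig = glm::inverse(rigToWorld);
    return worldToRig * sensorToWorld * hmdSensorMat;
}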