This article collects typical usage examples of the C++ method MyAvatar::getLaserPointerTipPosition. If you are unsure what MyAvatar::getLaserPointerTipPosition does or how to call it, the curated code examples below may help. You can also explore further usage examples of the enclosing class, MyAvatar.
Four code examples of MyAvatar::getLaserPointerTipPosition are shown below, sorted by popularity by default. You can rate the examples you find useful; your feedback helps surface better C++ code examples.
Example 1: getPalmClickLocation
// Calculate the click location using one of the Sixense controllers. Scale is not applied.
QPoint ApplicationCompositor::getPalmClickLocation(const PalmData *palm) const {
    QPoint rv;
    auto canvasSize = qApp->getCanvasSize();
    if (qApp->isHMDMode()) {
        glm::vec2 polar = getPolarCoordinates(*palm);
        glm::vec2 point = sphericalToScreen(-polar);
        rv.rx() = point.x;
        rv.ry() = point.y;
    } else {
        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        glm::dmat4 projection;
        qApp->getProjectionMatrix(&projection);
        glm::quat invOrientation = glm::inverse(myAvatar->getOrientation());
        glm::vec3 eyePos = myAvatar->getDefaultEyePosition();
        glm::vec3 tip = myAvatar->getLaserPointerTipPosition(palm);
        glm::vec3 tipPos = invOrientation * (tip - eyePos);

        // Project the tip into clip space, then divide by w to get normalized device coordinates.
        glm::vec4 clipSpacePos = glm::vec4(projection * glm::dvec4(tipPos, 1.0));
        glm::vec3 ndcSpacePos;
        if (clipSpacePos.w != 0) {
            ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;
        }

        // Map NDC ([-1, 1] on each axis) to pixel coordinates, flipping Y for screen space.
        rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * canvasSize.x);
        rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * canvasSize.y);
    }
    return rv;
}
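The non-HMD branch above projects the laser tip from avatar space into clip space, performs the perspective divide to get normalized device coordinates (NDC), and then maps NDC to pixels with a Y flip. Below is a minimal standalone sketch of that mapping, assuming only glm; viewSpaceToScreen and the sample values in main are hypothetical names and data used for illustration, not part of the original code.

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <cstdio>

// Hypothetical helper: project a view-space point to pixel coordinates.
// Returns false when the point is at or behind the camera plane (w <= 0).
bool viewSpaceToScreen(const glm::mat4& projection, const glm::vec3& viewPos,
                       const glm::vec2& canvasSize, glm::vec2& screenOut) {
    glm::vec4 clip = projection * glm::vec4(viewPos, 1.0f);
    if (clip.w <= 0.0f) {
        return false;                                 // behind the camera
    }
    glm::vec3 ndc = glm::vec3(clip) / clip.w;         // perspective divide -> [-1, 1]
    screenOut.x = (ndc.x + 1.0f) * 0.5f * canvasSize.x;
    screenOut.y = (1.0f - (ndc.y + 1.0f) * 0.5f) * canvasSize.y;  // flip Y for screen space
    return true;
}

int main() {
    glm::mat4 projection = glm::perspective(glm::radians(60.0f), 16.0f / 9.0f, 0.1f, 100.0f);
    glm::vec2 screen;
    if (viewSpaceToScreen(projection, glm::vec3(0.2f, -0.1f, -2.0f), glm::vec2(1920, 1080), screen)) {
        std::printf("screen: %.1f, %.1f\n", screen.x, screen.y);
    }
    return 0;
}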
Example 2: renderPointersOculus
void ApplicationOverlay::renderPointersOculus(const glm::vec3& eyePos) {
    glBindTexture(GL_TEXTURE_2D, _crosshairTexture);
    glDisable(GL_DEPTH_TEST);
    glMatrixMode(GL_MODELVIEW);

    // Controller pointers
    MyAvatar* myAvatar = Application::getInstance()->getAvatar();
    for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
        PalmData& palm = myAvatar->getHand()->getPalms()[i];
        if (palm.isActive()) {
            glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
            glm::vec3 tipDirection = glm::normalize(glm::inverse(myAvatar->getOrientation()) * (tip - eyePos));
            float pitch = -glm::asin(tipDirection.y);
            float yawSign = glm::sign(-tipDirection.x);
            float yaw = glm::acos(-tipDirection.z) *
                        ((yawSign == 0.0f) ? 1.0f : yawSign);
            glm::quat orientation = glm::quat(glm::vec3(pitch, yaw, 0.0f));
            renderReticle(orientation, _alpha);
        }
    }

    // Mouse pointer
    if (_reticleActive[MOUSE]) {
        glm::vec2 projection = screenToSpherical(glm::vec2(_reticlePosition[MOUSE].x(),
                                                           _reticlePosition[MOUSE].y()));
        glm::quat orientation(glm::vec3(-projection.y, projection.x, 0.0f));
        renderReticle(orientation, _alpha);
    }

    glEnable(GL_DEPTH_TEST);
}
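renderPointersOculus turns each normalized tip direction into a pitch/yaw orientation for the reticle: pitch is derived from the Y component, yaw from the Z component, with the sign of the yaw taken from X. A minimal sketch of that decomposition, assuming glm; buildReticleOrientation is a hypothetical name used only for illustration, not part of the original code.

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Mirror of the pitch/yaw math in the example above.
// dir must be normalized; -Z is treated as "forward", +Y as "up".
glm::quat buildReticleOrientation(const glm::vec3& dir) {
    float pitch = -glm::asin(dir.y);                   // rotation about X
    float yawSign = glm::sign(-dir.x);
    float yaw = glm::acos(-dir.z) *
                ((yawSign == 0.0f) ? 1.0f : yawSign);  // rotation about Y, signed by X
    return glm::quat(glm::vec3(pitch, yaw, 0.0f));     // Euler angles (pitch, yaw, roll)
}

int main() {
    glm::vec3 dir = glm::normalize(glm::vec3(0.1f, 0.2f, -1.0f));
    glm::quat q = buildReticleOrientation(dir);
    (void)q;  // in the original code this orientation would feed renderReticle
    return 0;
}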
Example 3: getPalmClickLocation
// Calculate the click location using one of the Sixense controllers. Scale is not applied.
QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
    Application* application = Application::getInstance();
    GLCanvas* glWidget = application->getGLWidget();
    MyAvatar* myAvatar = application->getAvatar();

    glm::vec3 tip = myAvatar->getLaserPointerTipPosition(palm);
    glm::vec3 eyePos = myAvatar->getHead()->getEyePosition();
    glm::quat invOrientation = glm::inverse(myAvatar->getOrientation());
    // The ray direction points towards the camera.
    glm::vec3 dir = invOrientation * glm::normalize(application->getCamera()->getPosition() - tip);
    glm::vec3 tipPos = invOrientation * (tip - eyePos);

    QPoint rv;
    if (OculusManager::isConnected()) {
        float t;
        // Back the ray up by dir to ensure that it does not start inside the UI sphere.
        glm::vec3 adjustedPos = tipPos - dir;
        // Find the intersection of the crosshair ray with the overlay sphere.
        if (raySphereIntersect(dir, adjustedPos, _oculusUIRadius * myAvatar->getScale(), &t)) {
            glm::vec3 collisionPos = adjustedPos + dir * t;
            // Normalize it in case the radius is not 1.
            collisionPos = glm::normalize(collisionPos);
            // If we hit the back hemisphere, mark it as not a collision.
            if (collisionPos.z > 0) {
                rv.setX(INT_MAX);
                rv.setY(INT_MAX);
            } else {
                float u = asin(collisionPos.x) / (_textureFov) + 0.5f;
                float v = 1.0 - (asin(collisionPos.y) / (_textureFov) + 0.5f);
                rv.setX(u * glWidget->width());
                rv.setY(v * glWidget->height());
            }
        } else {
            // If the click did not land on the overlay, just set the coordinates to INT_MAX.
            rv.setX(INT_MAX);
            rv.setY(INT_MAX);
        }
    } else {
        glm::dmat4 projection;
        application->getProjectionMatrix(&projection);
        glm::vec4 clipSpacePos = glm::vec4(projection * glm::dvec4(tipPos, 1.0));
        glm::vec3 ndcSpacePos;
        if (clipSpacePos.w != 0) {
            ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;
        }
        rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * glWidget->width());
        rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * glWidget->height());
    }
    return rv;
}
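The HMD branch depends on raySphereIntersect, whose implementation is not shown in this example. As a reference point, a standard ray/sphere intersection against a sphere centered at the origin could look like the sketch below; this is an assumption about the general technique, not the overlay's actual implementation, and raySphereIntersectSketch is a hypothetical name.

#include <glm/glm.hpp>
#include <cmath>
#include <cstdio>

// Assumed formulation: intersect a ray (origin, normalized direction) with a
// sphere of radius r centered at the origin. On a hit, writes the nearest
// positive distance along the ray to *t and returns true.
bool raySphereIntersectSketch(const glm::vec3& dir, const glm::vec3& origin,
                              float r, float* t) {
    float b = 2.0f * glm::dot(origin, dir);
    float c = glm::dot(origin, origin) - r * r;
    float disc = b * b - 4.0f * c;            // a == 1 because dir is normalized
    if (disc < 0.0f) {
        return false;                          // ray misses the sphere
    }
    float sqrtDisc = std::sqrt(disc);
    float t0 = (-b - sqrtDisc) * 0.5f;
    float t1 = (-b + sqrtDisc) * 0.5f;
    float nearest = (t0 > 0.0f) ? t0 : t1;     // prefer the closer hit in front of the origin
    if (nearest <= 0.0f) {
        return false;                          // both hits are behind the ray origin
    }
    *t = nearest;
    return true;
}

int main() {
    float t;
    if (raySphereIntersectSketch(glm::vec3(0, 0, -1), glm::vec3(0, 0, 5), 1.0f, &t)) {
        std::printf("hit at t = %.2f\n", t);   // expected: 4.00
    }
    return 0;
}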
Example 4: getPolarCoordinates
vec2 getPolarCoordinates(const PalmData& palm) {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    auto avatarOrientation = myAvatar->getOrientation();
    auto eyePos = myAvatar->getDefaultEyePosition();
    glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
    // Direction of the tip relative to the eye
    glm::vec3 tipDirection = tip - eyePos;
    // Orient into avatar space
    tipDirection = glm::inverse(avatarOrientation) * tipDirection;
    // Normalize for trig functions
    tipDirection = glm::normalize(tipDirection);
    // Convert to polar coordinates
    glm::vec2 polar(glm::atan(tipDirection.x, -tipDirection.z), glm::asin(tipDirection.y));
    return polar;
}
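Example 4 reduces the eye-to-tip direction to an (azimuth, elevation) pair: azimuth from atan(x, -z) and elevation from asin(y). The sketch below shows that conversion together with its inverse, assuming glm; directionToPolar and polarToDirection are hypothetical names used only for illustration, not part of the original code.

#include <glm/glm.hpp>
#include <cstdio>

// Azimuth around +Y (zero when looking down -Z), elevation toward +Y.
glm::vec2 directionToPolar(const glm::vec3& dirIn) {
    glm::vec3 dir = glm::normalize(dirIn);
    return glm::vec2(glm::atan(dir.x, -dir.z), glm::asin(dir.y));
}

// Inverse mapping: rebuild a unit direction from (azimuth, elevation).
glm::vec3 polarToDirection(const glm::vec2& polar) {
    float cosEl = glm::cos(polar.y);
    return glm::vec3(cosEl * glm::sin(polar.x),
                     glm::sin(polar.y),
                     -cosEl * glm::cos(polar.x));
}

int main() {
    glm::vec3 dir = glm::normalize(glm::vec3(0.3f, 0.4f, -1.0f));
    glm::vec2 polar = directionToPolar(dir);
    glm::vec3 back = polarToDirection(polar);   // should match dir up to rounding error
    std::printf("azimuth=%.3f elevation=%.3f roundTripError=%.6f\n",
                polar.x, polar.y, glm::length(back - dir));
    return 0;
}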