This page collects typical usage examples of the C++ method Mat4::inverse. If you have been wondering what Mat4::inverse does in practice, how to call it, or what real code that uses it looks like, the curated examples below should help; they also give a feel for the enclosing Mat4 class.
Twelve code examples of Mat4::inverse are shown below, ordered by popularity by default.
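Before the individual examples, here is a minimal, self-contained sketch of one common way a 4x4 inverse is implemented: Gauss-Jordan elimination with partial pivoting on a plain row-major float[4][4]. The function name invert4x4 and the array layout are illustrative assumptions only; the Mat4 classes in the examples below each have their own storage layout and API (some return the inverse, some invert in place, and some also report the determinant, as in Example 1 and Example 5).

#include <cmath>
#include <cstddef>
#include <utility>

bool invert4x4(const float in[4][4], float out[4][4])
{
    // Build the augmented matrix [A | I].
    float a[4][8];
    for (std::size_t i = 0; i < 4; ++i)
        for (std::size_t j = 0; j < 4; ++j) {
            a[i][j]     = in[i][j];
            a[i][j + 4] = (i == j) ? 1.0f : 0.0f;
        }

    for (std::size_t col = 0; col < 4; ++col) {
        // Partial pivoting: pick the remaining row with the largest entry in this column.
        std::size_t pivot = col;
        for (std::size_t r = col + 1; r < 4; ++r)
            if (std::fabs(a[r][col]) > std::fabs(a[pivot][col]))
                pivot = r;
        if (std::fabs(a[pivot][col]) < 1e-12f)
            return false;                          // singular (or nearly so)
        if (pivot != col)
            for (std::size_t j = 0; j < 8; ++j)
                std::swap(a[col][j], a[pivot][j]);

        // Normalize the pivot row, then eliminate this column from every other row.
        const float inv_p = 1.0f / a[col][col];
        for (std::size_t j = 0; j < 8; ++j)
            a[col][j] *= inv_p;
        for (std::size_t r = 0; r < 4; ++r) {
            if (r == col)
                continue;
            const float f = a[r][col];
            for (std::size_t j = 0; j < 8; ++j)
                a[r][j] -= f * a[col][j];
        }
    }

    // The right half of the augmented matrix now holds the inverse.
    for (std::size_t i = 0; i < 4; ++i)
        for (std::size_t j = 0; j < 4; ++j)
            out[i][j] = a[i][j + 4];
    return true;
}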
Example 1: quadrix_find_best_fit
bool quadrix_find_best_fit(const Mat4& Q, Vec3& candidate)
{
    Mat4 K = Q;

    K(3,0) = K(3,1) = K(3,2) = 0.0;  K(3,3) = 1;

    Mat4 M;
    real det = K.inverse(M);
    if( FEQ(det, 0.0, 1e-12) )
        return false;

#ifdef SAFETY
    //
    // The homogeneous division SHOULDN'T be necessary.
    // But, when we're being SAFE, we do it anyway just in case.
    //
    candidate[X] = M(0,3)/M(3,3);
    candidate[Y] = M(1,3)/M(3,3);
    candidate[Z] = M(2,3)/M(3,3);
#else
    candidate[X] = M(0,3);
    candidate[Y] = M(1,3);
    candidate[Z] = M(2,3);
#endif

    return true;
}
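Example 1 comes from quadric-error-metric style mesh simplification: the candidate filled in above is the position that minimizes the quadric error v^T Q v, obtained by inverting the quadric with its bottom row replaced by (0, 0, 0, 1). As a companion, a hypothetical helper that evaluates that error at a point (plain row-major double[4][4] rather than the library's Mat4) could look like this:

#include <cstddef>

// Evaluate v^T Q v for the homogeneous point v = (x, y, z, 1).
double quadric_error(const double Q[4][4], double x, double y, double z)
{
    const double v[4] = { x, y, z, 1.0 };
    double e = 0.0;
    for (std::size_t i = 0; i < 4; ++i)
        for (std::size_t j = 0; j < 4; ++j)
            e += v[i] * Q[i][j] * v[j];
    return e;
}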
Example 2: calc_projective
void calc_projective (const std::vector<double>& frame_ts,
                      const std::vector<Vec4>& gyro_quat,
                      const std::vector<Vec3>& acc_trans,
                      const std::vector<double>& gyro_ts,
                      CalibrationParams calib,
                      std::vector<Mat4>& projective)
{
    int index0 = 0;
    int index1 = 0;

    size_t frame_count = frame_ts.size();
    // Each frame needs the next frame's timestamp and translation, so stop one short of the end.
    for (int fid = 0; fid + 1 < (int)frame_count; fid++) {
        const double ts0 = frame_ts[fid] + calib.gyro_delay;
        Quatern quat0 = interp_gyro_quatern(ts0, gyro_quat, gyro_ts, index0) + Quatern(calib.gyro_drift);

        const double ts1 = frame_ts[fid + 1] + calib.gyro_delay;
        Quatern quat1 = interp_gyro_quatern(ts1, gyro_quat, gyro_ts, index1) + Quatern(calib.gyro_drift);

        Vec3 trans0 = acc_trans[fid];
        Vec3 trans1 = acc_trans[fid + 1];

        Mat4 extr0 = calc_extrinsic(quat0, trans0);
        Mat4 extr1 = calc_extrinsic(quat1, trans1);

        Mat3 intr = calc_intrinsic(calib.fx, calib.fy, calib.cx, calib.cy, calib.skew);
        Mat4 intrinsic = Mat4(Vec4(intr.v0, 0),
                              Vec4(intr.v1, 0),
                              Vec4(intr.v2, 0),
                              Vec4(0, 0, 0, 1));

        projective[fid] = intrinsic * extr0 * extr1.transpose() * intrinsic.inverse();
    }
}
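The per-frame result in Example 2, intrinsic * extr0 * extr1.transpose() * intrinsic.inverse(), is the usual gyro-stabilization warp: map pixels to rays with the inverse intrinsics, rotate from the second pose to the first, then reproject. One detail worth knowing is that the pinhole intrinsic matrix has a cheap closed-form inverse, so that factor never strictly needs a general matrix inversion. A hypothetical, self-contained helper (plain float[3][3] instead of the Mat3/Mat4 above):

// Closed-form inverse of K = [[fx, skew, cx], [0, fy, cy], [0, 0, 1]].
void intrinsic_inverse(float fx, float fy, float cx, float cy, float skew, float out[3][3])
{
    out[0][0] = 1.0f / fx;
    out[0][1] = -skew / (fx * fy);
    out[0][2] = (skew * cy - cx * fy) / (fx * fy);
    out[1][0] = 0.0f;
    out[1][1] = 1.0f / fy;
    out[1][2] = -cy / fy;
    out[2][0] = 0.0f;
    out[2][1] = 0.0f;
    out[2][2] = 1.0f;
}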
Example 3: update
void Geometry::update(u32 delta)
{
    char szPath[MAX_PATH] = {0};
    char szPath2[MAX_PATH];

    while(mAniTime.current > mAniTime.end)
    {
        mAniTime.current -= mAniTime.end;
    }
    if (mSkin && mSkeleton)
    {
        mSkeleton->update(mAniTime, *mSkin);
    }
    if (!mStopAimation)
    {
        mAniTime.current += delta * m_speed;
    }
    //
    Vec3 speed(0.000, 0.000, 0.0);
    Quaternion q(0, 0, 0, 0.000*mAniTime.current);
    Mat4 tQ(q);
    Mat4 tT = Mat4::IDENTITY;
    Vec3 offsetMatrix(-0.5, -0.5, 0.0);
    tT.setTrans(offsetMatrix);
    mUVMatrix = tT.inverse() * tQ * tT;
    //
    mMaterial->update(delta);
}
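The line mUVMatrix = tT.inverse() * tQ * tT in Example 3 is the standard rotate-about-a-pivot sandwich: translate the pivot to the origin, rotate, then translate back. With the (-0.5, -0.5) offset this rotates texture coordinates around the UV center (0.5, 0.5), assuming a column-vector convention. A plain 2D sketch of the same idea, using hypothetical names and no Mat4:

#include <cmath>

// p' = R * (p - pivot) + pivot, i.e. rotate the point (u, v) about the pivot.
void rotate_uv_about_pivot(float u, float v, float angle,
                           float pivot_u, float pivot_v,
                           float &out_u, float &out_v)
{
    const float c = std::cos(angle);
    const float s = std::sin(angle);
    const float du = u - pivot_u;
    const float dv = v - pivot_v;
    out_u = c * du - s * dv + pivot_u;
    out_v = s * du + c * dv + pivot_v;
}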
Example 4: setUniformsForBuiltins
void GLProgram::setUniformsForBuiltins(const Mat4 &matrixM)
{
    auto& vp = _director->getVPMat();

    if (_flags.usesM)
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_M_MATRIX], matrixM.m, 1);

    if (_flags.usesV)
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_V_MATRIX], vp.view.m, 1);

    if (_flags.usesP)
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_P_MATRIX], vp.projection.m, 1);

    if (_flags.usesVP)
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_VP_MATRIX], vp.viewProjection.m, 1);

    if (_flags.usesMV) {
        Mat4 matrixMV = vp.view * matrixM;
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_MV_MATRIX], matrixMV.m, 1);
    }

    if (_flags.usesEyePosition)
    {
        auto pos = Camera::getVisitingCamera()->getEyePosition();
        setUniformLocationWith3f(_builtInUniforms[UNIFORM_EYE_POSITION], pos.x, pos.y, pos.z);
    }

    if (_flags.usesMVP) {
        Mat4 matrixMVP = vp.viewProjection * matrixM;
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_MVP_MATRIX], matrixMVP.m, 1);
    }

    if (_flags.usesNormal)
    {
        Mat4 mvInverse = matrixM;
        mvInverse.m[12] = mvInverse.m[13] = mvInverse.m[14] = 0.0f;
        mvInverse.inverse();
        mvInverse.transpose();
        GLfloat normalMat[9];
        normalMat[0] = mvInverse.m[0]; normalMat[1] = mvInverse.m[1]; normalMat[2] = mvInverse.m[2];
        normalMat[3] = mvInverse.m[4]; normalMat[4] = mvInverse.m[5]; normalMat[5] = mvInverse.m[6];
        normalMat[6] = mvInverse.m[8]; normalMat[7] = mvInverse.m[9]; normalMat[8] = mvInverse.m[10];
        setUniformLocationWithMatrix3fv(_builtInUniforms[UNIFORM_NORMAL_MATRIX], normalMat, 1);
    }

    if (_flags.usesTime) {
        // This doesn't give the most accurate global time value.
        // Cocos2D doesn't store a high precision time value, so this will have to do.
        // Getting Mach time per frame per shader using time could be extremely expensive.
        float time = _director->getTotalFrames() * _director->getAnimationInterval();

        setUniformLocationWith4f(_builtInUniforms[GLProgram::UNIFORM_TIME], time/10.0, time, time*2, time*4);
        setUniformLocationWith4f(_builtInUniforms[GLProgram::UNIFORM_SIN_TIME], time/8.0, time/4.0, time/2.0, sinf(time));
        setUniformLocationWith4f(_builtInUniforms[GLProgram::UNIFORM_COS_TIME], time/8.0, time/4.0, time/2.0, cosf(time));
    }

    if (_flags.usesRandom)
        setUniformLocationWith4f(_builtInUniforms[GLProgram::UNIFORM_RANDOM01], CCRANDOM_0_1(), CCRANDOM_0_1(), CCRANDOM_0_1(), CCRANDOM_0_1());
}
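The usesNormal branch above builds the normal matrix by zeroing the translation, inverting, transposing, and keeping the upper-left 3x3. For a 3x3 this can be computed directly, because the inverse-transpose of M equals the cofactor matrix of M divided by det(M). A self-contained sketch with a plain row-major float[3][3] and a hypothetical helper name:

#include <cmath>
#include <cstddef>

// out = inverse(transpose(m)) = cofactor(m) / det(m); returns false if m is singular.
bool normal_matrix(const float m[3][3], float out[3][3])
{
    // Cofactors of the 3x3 matrix.
    const float c00 =  (m[1][1] * m[2][2] - m[1][2] * m[2][1]);
    const float c01 = -(m[1][0] * m[2][2] - m[1][2] * m[2][0]);
    const float c02 =  (m[1][0] * m[2][1] - m[1][1] * m[2][0]);
    const float c10 = -(m[0][1] * m[2][2] - m[0][2] * m[2][1]);
    const float c11 =  (m[0][0] * m[2][2] - m[0][2] * m[2][0]);
    const float c12 = -(m[0][0] * m[2][1] - m[0][1] * m[2][0]);
    const float c20 =  (m[0][1] * m[1][2] - m[0][2] * m[1][1]);
    const float c21 = -(m[0][0] * m[1][2] - m[0][2] * m[1][0]);
    const float c22 =  (m[0][0] * m[1][1] - m[0][1] * m[1][0]);

    // Determinant by cofactor expansion along the first row.
    const float det = m[0][0] * c00 + m[0][1] * c01 + m[0][2] * c02;
    if (std::fabs(det) < 1e-12f)
        return false;

    const float inv_det = 1.0f / det;
    const float c[3][3] = { { c00, c01, c02 }, { c10, c11, c12 }, { c20, c21, c22 } };
    for (std::size_t i = 0; i < 3; ++i)
        for (std::size_t j = 0; j < 3; ++j)
            out[i][j] = c[i][j] * inv_det;   // cofactor / det = inverse-transpose
    return true;
}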
Example 5: renderScene
void RenderEngine::renderScene(Mat4 const& matView)
{
    Mat4 matViewInv;
    float det;
    matView.inverse( matViewInv , det );

    mEffect.bind();
    mEffect.setParam( "gParam.matViewInv" , matViewInv );
    glLoadMatrixf( matView );
    renderGroup( mParam.world->mRootGroup );
    mEffect.unbind();
}
Example 6: setCommon3dEnvironment
/**
 * Draw a 3-D model.
 */
void ShaderNode::setCommon3dEnvironment()
{
    // Create world matrix
    Mat4 transMatrix;
    Mat4::createTranslation(_position3d.x, _position3d.y, _position3d.z, &transMatrix);
    Mat4 rotXMatrix;
    Mat4::createRotationX(_rotation3d.x, &rotXMatrix);
    Mat4 rotYMatrix;
    Mat4::createRotationY(_rotation3d.y, &rotYMatrix);
    Mat4 rotZMatrix;
    Mat4::createRotationZ(_rotation3d.z, &rotZMatrix);
    Mat4 scaleMatrix;
    Mat4::createScale(_scale3d.x, _scale3d.y, _scale3d.z, &scaleMatrix);

    // Compose the transformation: Scaling -> Rotation -> Translation
    Mat4 worldMatrix;
    worldMatrix = transMatrix * rotYMatrix * rotXMatrix * rotZMatrix * scaleMatrix;

    // Pass the world matrix
    getGLProgramState()->setUniformMat4("u_worldMatrix", worldMatrix);

    // Pass the view matrix
    getGLProgramState()->setUniformMat4("u_viewMatrix", _camera->getViewMatrix());

    // Pass the projection matrix
    getGLProgramState()->setUniformMat4("u_projectionMatrix", _camera->getProjectionMatrix());

    // Pass the inverse-transpose of the model-view matrix to transform normals
    Mat4 modelViewMatrixIT = _camera->getViewMatrix() * worldMatrix;
    modelViewMatrixIT.inverse();
    modelViewMatrixIT.transpose();
    getGLProgramState()->setUniformMat4("u_normalMatrix", modelViewMatrixIT);

    // Pass the direction of a light
    getGLProgramState()->setUniformVec3("u_lightDirection0", _light->getDirection());

    // Pass the eye position
    getGLProgramState()->setUniformVec3("u_eye", _camera->getEye());

    // Pass the texture
    if (_texture0 != nullptr)
    {
        getGLProgramState()->setUniformTexture("u_texture0", _texture0);
    }

    // Pass the normal map
    if (_texture1 != nullptr)
    {
        getGLProgramState()->setUniformTexture("u_texture1", _texture1);
    }
}
Example 7: setUniformsForBuiltins
void GLProgram::setUniformsForBuiltins(const Mat4 &matrixMV)
{
    auto& matrixP = _director->getMatrix(MATRIX_STACK_TYPE::MATRIX_STACK_PROJECTION);

    if(_flags.usesP)
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_P_MATRIX], matrixP.m, 1);

    if(_flags.usesMV)
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_MV_MATRIX], matrixMV.m, 1);

    if(_flags.usesMVP) {
        Mat4 matrixMVP = matrixP * matrixMV;
        setUniformLocationWithMatrix4fv(_builtInUniforms[UNIFORM_MVP_MATRIX], matrixMVP.m, 1);
    }

    if (_flags.usesNormal)
    {
        Mat4 mvInverse = matrixMV;
        mvInverse.m[12] = mvInverse.m[13] = mvInverse.m[14] = 0.0f;
        mvInverse.inverse();
        mvInverse.transpose();
        GLfloat normalMat[9];
        normalMat[0] = mvInverse.m[0];
        normalMat[1] = mvInverse.m[1];
        normalMat[2] = mvInverse.m[2];
        normalMat[3] = mvInverse.m[4];
        normalMat[4] = mvInverse.m[5];
        normalMat[5] = mvInverse.m[6];
        normalMat[6] = mvInverse.m[8];
        normalMat[7] = mvInverse.m[9];
        normalMat[8] = mvInverse.m[10];
        setUniformLocationWithMatrix3fv(_builtInUniforms[UNIFORM_NORMAL_MATRIX], normalMat, 1);
    }

    if(_flags.usesTime) {
        // This doesn't give the most accurate global time value.
        // Cocos2D doesn't store a high precision time value, so this will have to do.
        // Getting Mach time per frame per shader using time could be extremely expensive.
        float time = _director->getTotalFrames() * _director->getAnimationInterval();

        setUniformLocationWith4f(_builtInUniforms[GLProgram::UNIFORM_TIME], time/10.0, time, time*2, time*4);
        setUniformLocationWith4f(_builtInUniforms[GLProgram::UNIFORM_SIN_TIME], time/8.0, time/4.0, time/2.0, sinf(time));
        setUniformLocationWith4f(_builtInUniforms[GLProgram::UNIFORM_COS_TIME], time/8.0, time/4.0, time/2.0, cosf(time));
    }

    if(_flags.usesRandom)
        setUniformLocationWith4f(_builtInUniforms[GLProgram::UNIFORM_RANDOM01], CCRANDOM_0_1(), CCRANDOM_0_1(), CCRANDOM_0_1(), CCRANDOM_0_1());
}
Example 8: getVertexTransformMatrix
Mat4 Bone::getVertexTransformMatrix(const Mat4& meshGeometryMatrix, const Mat4& globalPositionMatrix)
{
    if (this->linkMode == LinkMode::Additive) {
        // TODO:
        assert(false);
        return Mat4();
    } else {
        this->updateMatrix();

        Mat4 globalInitPosition = transformMatrix * meshGeometryMatrix;
        Mat4 transformLinkMatrixInverse = transformLinkMatrix.inverse();
        Mat4 clusterRelativeInitPosition = transformLinkMatrixInverse * globalInitPosition;

        Mat4 globalCurrentPositionInverse = globalPositionMatrix.inverse();
        Mat4 clusterRelativeCurrentPositionInverse = globalCurrentPositionInverse * getComponent<Transform>()->getWorldMatrix();

        return clusterRelativeCurrentPositionInverse * clusterRelativeInitPosition;
    }
}
Example 9: setPosition
void Object::setPosition(const Vector2D &position, bool global)
{
    if( !global || !parent || !(parentMode & ENABLE_POSITION))
        transform.position = position;
    else
    {
        Vector2D newposition;
        switch (parentMode)
        {
            // rotation
            default:
            {
                // get world position
                Mat4 wordPos;
                Transform2D toWord;
                computeMatrix(toWord, wordPos);
                wordPos.inverse();
                newposition = wordPos.mul2D(position);
                if(!(parentMode & ENABLE_SCALE))
                    newposition /= getGlobalParentScale();
            }
            break;
            // no rotation
            case ENABLE_POSITION:
            case ENABLE_POSITION|ENABLE_SCALE:
            {
                Mat4 pRotation;
                pRotation.setRotZ(Angle(Radian(Math::PI2)) - parent->getGlobalMatrix().getRotZ());
                newposition = pRotation.mul2D( position - parent->getPosition(true)) / getGlobalParentScale();
            }
            break;
        }
        transform.position = newposition;
    }
    change();
}
Example 10: screenToWorld
// Utils
// Some links:
// ***** http://www.songho.ca/opengl/gl_transform.html
// **    http://www.vb6.us/tutorials/using-mouse-click-3d-space-directx-8
// ***   http://www.mvps.org/directx/articles/rayproj.htm
// -------------------------------------------------------------------------
Vec3 Camera::screenToWorld(float nX, float nY, float nZ) {
    float mouse_x = nX;
    float mouse_y = nY;

    // @todo
    printf("ERROR in CAMERA: ofGetWidth(), ofGetHeight() cannot be used! pass as param!");
    float screen_w = 100;
    float screen_h = 200;
    // float screen_w = ofGetWidth();
    // float screen_h = ofGetHeight();
    float aspect = screen_w/screen_h;

    float ndx = -1.0f + (mouse_x/screen_w) * 2.0f;    // left: -1, right: 1
    float ndy = (1.0f - (mouse_y/(screen_h * 0.5)));  // top: -1, bottom: 1
    float ndz = 2.0*nZ - 1.0;
    //ndz = nZ;

    updateViewMatrix();
    updateProjectionMatrix();

    Vec4 ndc(ndx, ndy, ndz, 1.0);
    // cout << "ndc cam:" << ndc << endl;
    // Mat4 mvp = mvm() * pm() ;
    Mat4 mvp = pm() * vm();
    mvp.inverse();
    ndc = mvp * ndc;
    // cout << "out cam:" << ndc << endl;
    // ndc = affine_inverse(mvp) * ndc;
    // cout << "@@@@@@@@@@@@@@@@@ cam @@@@@@@@@@@@@@@@@@@@@\n";
    //Mat4 inv = affine_inverse(mvp);
    //cout << inv;
    // cout << mvp ;
    // cout << "######################################\n\n";
    ndc.w = 1.0f / ndc.w;
    Vec3 r(ndc.x * ndc.w, ndc.y * ndc.w, ndc.z * ndc.w);
    return r;
}
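Example 10 is a standard unprojection: build normalized device coordinates from the mouse position, multiply by the inverse of projection * view, then divide by w. A compact, self-contained version of just that final step, using a plain row-major float[4][4] and hypothetical helper names:

#include <cstddef>

// Multiply a row-major 4x4 matrix by a column vector.
static void mat4_mul_vec4(const float m[4][4], const float v[4], float out[4])
{
    for (std::size_t i = 0; i < 4; ++i) {
        out[i] = 0.0f;
        for (std::size_t j = 0; j < 4; ++j)
            out[i] += m[i][j] * v[j];
    }
}

// NDC -> world, given an already inverted view-projection matrix.
void unproject(const float inv_vp[4][4], float ndx, float ndy, float ndz, float world[3])
{
    const float ndc[4] = { ndx, ndy, ndz, 1.0f };
    float p[4];
    mat4_mul_vec4(inv_vp, ndc, p);

    const float inv_w = 1.0f / p[3];   // perspective divide
    world[0] = p[0] * inv_w;
    world[1] = p[1] * inv_w;
    world[2] = p[2] * inv_w;
}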
Example 11: draw
void TMXLayer::draw(Renderer *renderer, const Mat4& transform, uint32_t flags)
{
    updateTotalQuads();

    if( flags != 0 || _dirty || _quadsDirty )
    {
        Size s = Director::getInstance()->getWinSize();
        auto rect = Rect(0, 0, s.width, s.height);

        Mat4 inv = transform;
        inv.inverse();
        rect = RectApplyTransform(rect, inv);

        updateTiles(rect);
        updateIndexBuffer();
        updatePrimitives();
        _dirty = false;
    }

    if(_renderCommands.size() < static_cast<size_t>(_primitives.size()))
    {
        _renderCommands.resize(_primitives.size());
    }

    _texture->prepareDraw();

    int index = 0;
    for(const auto& iter : _primitives)
    {
        if(iter.second->getCount() > 0)
        {
            auto& cmd = _renderCommands[index++];
            // Changed the blend mode to match the art team's requirement (same as Photoshop). added by tokentong 20150327
            cmd.init(iter.first, _texture->getName(), getGLProgramState(), BlendFunc::ALPHA_PREMULTIPLIED, iter.second, _modelViewTransform, flags);
            renderer->addCommand(&cmd);
        }
    }
}
Example 12: spawnParticles
// The SpawnParticles function is responsible for generating new
// particles in your world. You will call this function as you
// traverse your model's hierarchy. When you reach a point
// in the hierarchy from where particles should be emitted,
// call this function!
//
// SpawnParticles takes the camera transformation matrix as a
// parameter. More on this later.
void SampleModel::spawnParticles(Mat4<float> cameraTransform) {
    /****************************************************************
    **
    ** THIS FUNCTION WILL ADD A NEW PARTICLE TO OUR WORLD
    **
    ** Suppose we want particles to spawn from the model's arm.
    ** We need to find the location of the model's arm in world
    ** coordinates so that we can set the initial position of new
    ** particles. As discussed on the Animator project page,
    ** all particle positions should be in world coordinates.
    **
    ** At this point in execution, the MODELVIEW matrix contains the
    ** camera transforms multiplied by some model transforms. In other words,
    **
    **    MODELVIEW = CameraTransforms * ModelTransforms
    **
    ** We are interested only in ModelTransforms, which is the
    ** transformation that will convert a point from the current, local
    ** coordinate system to the world coordinate system.
    **
    ** To do this, we're going to "undo" the camera transforms from the
    ** current MODELVIEW matrix. The camera transform is passed in as
    ** a parameter to this function (remember when we saved it
    ** near the top of the model's draw method?). We can "undo" the
    ** camera transforms by pre-multiplying the current MODELVIEW matrix
    ** with the inverse of the camera matrix. In other words,
    **
    **    ModelTransforms = InverseCameraTransforms * MODELVIEW
    **
    ********************************************************************/
    ParticleSystem *ps = ModelerApplication::Instance()->GetParticleSystem();

    // Get the current MODELVIEW matrix.
    // ... "Undo" the camera transforms from the MODELVIEW matrix
    // ... by multiplying Inverse(CameraTransforms) * CurrentModelViewMatrix.
    // ... Store the result of this in a local variable called WorldMatrix.
    // ...
    Mat4f ModelMatrix = getModelViewMatrix();
    Mat4f WorldMatrix = cameraTransform.inverse() * ModelMatrix;

    /*****************************************************************
    **
    ** At this point, we have the transformation that will convert a point
    ** in the local coordinate system to a point in the world coordinate
    ** system.
    **
    ** We need to find the actual point in world coordinates
    ** where the particle should be spawned. This is simply
    ** "the origin of the local coordinate system" transformed by
    ** the WorldMatrix.
    **
    ******************************************************************/
    Vec4f Loc = WorldMatrix * Vec4f(0.0, 0.0, 0.0, 1.0);
    Vec4f VelL = WorldMatrix * Vec4f(0.0, 0.0, -0.5, 1.0);
    Vec4f Vel = VelL - Loc;
    Vec3f velocity(Vel[0], Vel[1], Vel[2]);
    velocity.normalize();

    /*****************************************************************
    **
    ** Now that we have the particle's initial position, we
    ** can finally add it to our system!
    **
    ***************************************************************/
    ps->setParticleStart(Vec3f(Loc[0], Loc[1], Loc[2]), velocity);
}
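A side note on the velocity computation in Example 12: VelL - Loc transforms two points and subtracts them, which by linearity is the same as transforming the local direction (0, 0, -0.5) with w = 0 so that the translation part of WorldMatrix drops out. A standalone sketch of that equivalence, with plain arrays and hypothetical names rather than the Mat4f/Vec4f types above:

#include <cstddef>

// Multiply a row-major 4x4 matrix by a column vector.
static void transform(const float m[4][4], const float v[4], float out[4])
{
    for (std::size_t i = 0; i < 4; ++i) {
        out[i] = 0.0f;
        for (std::size_t j = 0; j < 4; ++j)
            out[i] += m[i][j] * v[j];
    }
}

// Transform a local direction into world space: w = 0 ignores translation,
// so the result equals transforming two points and subtracting them.
void spawn_direction(const float world[4][4], float out_dir[3])
{
    const float local_dir[4] = { 0.0f, 0.0f, -0.5f, 0.0f };  // w = 0: direction, not a point
    float d[4];
    transform(world, local_dir, d);
    out_dir[0] = d[0];
    out_dir[1] = d[1];
    out_dir[2] = d[2];
}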