This article collects typical usage examples of the C++ method QuatF::mulP. If you are wondering what QuatF::mulP does, how it is used, or where to find concrete examples of it, the hand-picked code samples below should help. You can also follow up with the usage examples for the enclosing class QuatF.

A total of 4 code examples of QuatF::mulP are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code samples.
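Before the examples, here is a minimal sketch of the call pattern they all share. It assumes the Torque-style math types (QuatF, Point3F, EulerF) and the mDegToRad helper that appear in Example 4; the exact declaration of mulP is not reproduced on this page, so the signature described in the comment is inferred from the calls below rather than authoritative.

   // Inferred usage: q.mulP(p, &out) rotates point p by quaternion q and
   // writes the rotated point into *out; the nested calls in Example 3
   // suggest it also returns that result so it can be used in expressions.
   QuatF rot( EulerF( 0.f, 0.f, mDegToRad( 90.f ) ) );  // rotation about Z
   Point3F p( 1.f, 0.f, 0.f );
   Point3F rotated;
   rot.mulP( p, &rotated );   // rotated now holds p rotated 90 degrees about Z
   rot.mulP( p, &p );         // in-place form, as used in Example 4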
Example 1: integrate
//-----------------------------------------------------------------------------
void Rigid::integrate(F32 delta)
{
   // Update Angular position
   F32 angle = angVelocity.len();
   if (angle != 0.0f) {
      QuatF dq;
      F32 sinHalfAngle;
      mSinCos(angle * delta * -0.5f, sinHalfAngle, dq.w);
      sinHalfAngle *= 1.0f / angle;
      dq.x = angVelocity.x * sinHalfAngle;
      dq.y = angVelocity.y * sinHalfAngle;
      dq.z = angVelocity.z * sinHalfAngle;
      QuatF tmp = angPosition;
      angPosition.mul(tmp, dq);
      angPosition.normalize();

      // Rotate the position around the center of mass
      Point3F lp = linPosition - worldCenterOfMass;
      dq.mulP(lp, &linPosition);
      linPosition += worldCenterOfMass;
   }

   // Update angular momentum
   angMomentum = angMomentum + torque * delta;

   // Update linear position, momentum
   linPosition = linPosition + linVelocity * delta;
   linMomentum = linMomentum + force * delta;
   linVelocity = linMomentum * oneOverMass;

   // Update dependent state variables
   updateInertialTensor();
   updateVelocity();
   updateCenterOfMass();
}
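A short note on the quaternion built in this example (the standard axis-angle identity; this explanation is ours, not part of the original listing). For angular velocity $\omega$ and timestep $\Delta t$ the incremental rotation quaternion is

$$dq = \left(\frac{\omega_x}{|\omega|}\sin\frac{\theta}{2},\;\frac{\omega_y}{|\omega|}\sin\frac{\theta}{2},\;\frac{\omega_z}{|\omega|}\sin\frac{\theta}{2},\;\cos\frac{\theta}{2}\right),\qquad \theta = -|\omega|\,\Delta t$$

The code evaluates the sine and cosine of $\theta/2$ with a single mSinCos call and folds the $1/|\omega|$ factor into sinHalfAngle; the negative half-angle appears to be this engine's sign convention for the update. The same dq is then reused by mulP to swing linPosition around worldCenterOfMass.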
Example 2: getKineticEnergy
F32 Rigid::getKineticEnergy()
{
   Point3F w;
   QuatF qmat = angPosition;
   qmat.inverse();
   qmat.mulP(angVelocity, &w);

   const F32* f = invObjectInertia;
   return 0.5f * ((mass * mDot(linVelocity, linVelocity)) +
                  w.x * w.x / f[0] +
                  w.y * w.y / f[5] +
                  w.z * w.z / f[10]);
}
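For reference, the expression above is the usual rigid-body kinetic energy written against the diagonal of the inverse inertia tensor (this reading is ours, not text from the original page):

$$E = \tfrac{1}{2}\,m\,\|\mathbf{v}\|^2 + \tfrac{1}{2}\left(\frac{w_x^2}{f[0]} + \frac{w_y^2}{f[5]} + \frac{w_z^2}{f[10]}\right)$$

Here w is the angular velocity rotated into object space by qmat.mulP(angVelocity, &w), and f[0], f[5], f[10] are the diagonal entries of the 4x4 invObjectInertia matrix. Dividing by the inverse-inertia diagonal recovers the per-axis inertia only when the object-space inertia tensor is diagonal, which this code evidently assumes.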
Example 3: onRender
void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
{
   // Save the current transforms so we can restore
   // it for child control rendering below.
   GFXTransformSaver saver;
   bool renderingToTarget = false;

   if(!processCameraQuery(&mLastCameraQuery))
   {
      // We have no camera, but render the GUI children
      // anyway. This makes editing GuiTSCtrl derived
      // controls easier in the GuiEditor.
      renderChildControls( offset, updateRect );
      return;
   }

   GFXTargetRef origTarget = GFX->getActiveRenderTarget();

   // Set up the appropriate render style
   U32 prevRenderStyle = GFX->getCurrentRenderStyle();
   Point2F prevProjectionOffset = GFX->getCurrentProjectionOffset();
   Point2I renderSize = getExtent();

   if(mRenderStyle == RenderStyleStereoSideBySide)
   {
      GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide);
      GFX->setCurrentProjectionOffset(mLastCameraQuery.projectionOffset);
      GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);

      if (!mLastCameraQuery.hasStereoTargets)
      {
         // Need to calculate our current viewport here
         mLastCameraQuery.stereoViewports[0] = updateRect;
         mLastCameraQuery.stereoViewports[0].extent.x /= 2;
         mLastCameraQuery.stereoViewports[1] = mLastCameraQuery.stereoViewports[0];
         mLastCameraQuery.stereoViewports[1].point.x += mLastCameraQuery.stereoViewports[1].extent.x;
      }

      if (!mLastCameraQuery.hasFovPort)
      {
         // Need to make our own fovPort
         mLastCameraQuery.fovPort[0] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[0], mLastCameraQuery);
         mLastCameraQuery.fovPort[1] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[1], mLastCameraQuery);
      }

      GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
      GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
      GFX->setStereoTargets(mLastCameraQuery.stereoTargets);

      MatrixF myTransforms[2];

      if (smUseLatestDisplayTransform)
      {
         // Use the view matrix determined from the display device
         myTransforms[0] = mLastCameraQuery.eyeTransforms[0];
         myTransforms[1] = mLastCameraQuery.eyeTransforms[1];
      }
      else
      {
         // Use the view matrix determined from the control object
         myTransforms[0] = mLastCameraQuery.cameraMatrix;
         myTransforms[1] = mLastCameraQuery.cameraMatrix;

         QuatF qrot = mLastCameraQuery.cameraMatrix;
         Point3F pos = mLastCameraQuery.cameraMatrix.getPosition();
         Point3F rotEyePos;

         myTransforms[0].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[0], &rotEyePos));
         myTransforms[1].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[1], &rotEyePos));
      }

      GFX->setStereoEyeTransforms(myTransforms);

      // Allow render size to originate from the render target
      if (mLastCameraQuery.stereoTargets[0])
      {
         renderSize = mLastCameraQuery.stereoViewports[0].extent;
         renderingToTarget = true;
      }
   }
   else
   {
      GFX->setCurrentRenderStyle(GFXDevice::RS_Standard);
   }

   if ( mReflectPriority > 0 )
   {
      // Get the total reflection priority.
      F32 totalPriority = 0;
      for ( U32 i=0; i < smAwakeTSCtrls.size(); i++ )
         if ( smAwakeTSCtrls[i]->isVisible() )
            totalPriority += smAwakeTSCtrls[i]->mReflectPriority;

      REFLECTMGR->update( mReflectPriority / totalPriority,
                          getExtent(),
                          mLastCameraQuery );
   }

   if(mForceFOV != 0)
   //......... part of the code is omitted here .........
Example 4: _renderCone
void SFXEmitter::_renderCone( F32 radialIncrements, F32 sweepIncrements,
                              F32 pointDistance,
                              F32 startAngle, F32 stopAngle,
                              F32 startVolume, F32 stopVolume,
                              const ColorI& color )
{
   if( startAngle == stopAngle )
      return;

   const F32 startAngleRadians = mDegToRad( startAngle );
   const F32 stopAngleRadians = mDegToRad( stopAngle );
   const F32 radialIncrementsRadians = mDegToRad( radialIncrements );

   // Unit quaternions representing the start and end angle so we
   // can interpolate between the two without flipping.
   QuatF rotateZStart( EulerF( 0.f, 0.f, startAngleRadians / 2.f ) );
   QuatF rotateZEnd( EulerF( 0.f, 0.f, stopAngleRadians / 2.f ) );

   // Do an angular sweep on one side of our XY disc. Since we do a full 360 radial sweep
   // around Y for each angle, we only need to sweep over one side.
   const F32 increment = 1.f / ( ( ( startAngle / 2.f ) - ( stopAngle / 2.f ) ) / sweepIncrements );
   for( F32 t = 0.f; t < 1.0f; t += increment )
   {
      // Quaternion to rotate point into place on XY disc.
      QuatF rotateZ;
      rotateZ.interpolate( rotateZStart, rotateZEnd, t );

      // Quaternion to rotate one position around Y axis. Used for radial sweep.
      QuatF rotateYOne( EulerF( 0.f, radialIncrementsRadians, 0.f ) );

      // Do a radial sweep each step along the distance axis. For each step, volume is
      // the same for any point on the sweep circle.
      for( F32 y = pointDistance; y <= mDescription.mMaxDistance; y += pointDistance )
      {
         ColorI c = color;

         // Compute volume at current point. First off, find the interpolated volume
         // in the cone. Only for the outer cone will this actually result in
         // interpolation. For the remaining angles, the cone volume is constant.
         F32 volume = mLerp( startVolume, stopVolume, t );
         if( volume == 0.f )
            c.alpha = 0;
         else
         {
            // Apply distance attenuation.
            F32 attenuatedVolume = SFXDistanceAttenuation(
               SFX->getDistanceModel(),
               mDescription.mMinDistance,
               mDescription.mMaxDistance,
               y,
               volume,
               SFX->getRolloffFactor() ); //RDTODO

            // Fade alpha according to how much volume we
            // have left at the current point.
            c.alpha = F32( c.alpha ) * ( attenuatedVolume / 1.f );
         }

         PrimBuild::color( c );

         // Create points by doing a full 360 degree radial sweep around Y.
         Point3F p( 0.f, y, 0.f );
         rotateZ.mulP( p, &p );

         for( F32 radialAngle = 0.f; radialAngle < 360.f; radialAngle += radialIncrements )
         {
            PrimBuild::vertex3f( p.x, p.y, p.z );
            rotateYOne.mulP( p, &p );
         }
      }
   }
}