本文整理汇总了C++中PVRTVec3::normalize方法的典型用法代码示例。如果您正苦于以下问题:C++ PVRTVec3::normalize方法的具体用法?C++ PVRTVec3::normalize怎么用?C++ PVRTVec3::normalize使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类PVRTVec3
的用法示例。
在下文中一共展示了PVRTVec3::normalize方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1:
/*!****************************************************************************
 @Function		DrawBalloons
 @Input			psProgram		Program to use
				mProjection		Projection matrix to use
				mView			View matrix to use
				pmModels		A pointer to an array of model matrices
				iNum			Number of balloons to draw
 @Description	Draws the requested number of balloon meshes, one per model
				matrix, setting per-balloon model-space light and eye uniforms.
******************************************************************************/
void OGLES2Glass::DrawBalloons(Program* psProgram, PVRTMat4 mProjection, PVRTMat4 mView, PVRTMat4* pmModels, int iNum) {
	// Activate the balloon shader and bind its texture to unit 0
	glUseProgram(psProgram->uiId);
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiBalloonTex);

	PVRTMat4 mWorldView, mWorldViewProj;
	for (int iBalloon = 0; iBalloon < iNum; ++iBalloon)
	{
		// Build this balloon's transform chain and upload both matrices
		mWorldView = mView * pmModels[iBalloon];
		mWorldViewProj = mProjection * mWorldView;
		glUniformMatrix4fv(psProgram->auiLoc[eMVMatrix], 1, GL_FALSE, mWorldView.ptr());
		glUniformMatrix4fv(psProgram->auiLoc[eMVPMatrix], 1, GL_FALSE, mWorldViewProj.ptr());

		// Light direction (w == 0, so translation is ignored) brought into
		// this balloon's model space and normalised
		PVRTVec3 vLightDir = pmModels[iBalloon].inverse() * PVRTVec4(19, 22, -50, 0);
		vLightDir = vLightDir.normalize();
		glUniform3fv(psProgram->auiLoc[eLightDir], 1, vLightDir.ptr());

		// Eye position (the view-space origin) brought into model space
		PVRTVec3 vEyePos = mWorldView.inverse() * PVRTVec4(0.0f, 0.0f, 0.0f, 1.0f);
		glUniform3fv(psProgram->auiLoc[eEyePos], 1, vEyePos.ptr());

		// Uniforms are ready; issue the actual draw call for the balloon mesh
		DrawMesh(0, &m_Balloon, &m_puiBalloonVbo, &m_puiBalloonIndexVbo, 3);
	}
}
示例2: PVRShellGetTime
//.........这里部分代码省略.........
m_ulTimePrev = ulTime;
unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
m_ulTimePrev = ulTime;
m_fFrame += (float)ulDeltaTime * g_fDemoFrameRate;
if (m_fFrame > m_Scene.nNumFrame - 1) m_fFrame = 0;
// Sets the scene animation to this frame
m_Scene.SetFrame(m_fFrame);
/*
Get the direction of the first light from the scene.
*/
PVRTVec4 vLightDirection;
vLightDirection = m_Scene.GetLightDirection(0);
// For direction vectors, w should be 0
vLightDirection.w = 0.0f;
/*
Set up the view and projection matrices from the camera
*/
PVRTMat4 mView, mProjection;
PVRTVec3 vFrom, vTo(0.0f), vUp(0.0f, 1.0f, 0.0f);
float fFOV;
// Setup the camera
// Camera nodes are after the mesh and light nodes in the array
int i32CamID = m_Scene.pNode[m_Scene.nNumMeshNode + m_Scene.nNumLight + g_ui32Camera].nIdx;
// Get the camera position, target and field of view (fov)
if(m_Scene.pCamera[i32CamID].nIdxTarget != -1) // Does the camera have a target?
fFOV = m_Scene.GetCameraPos( vFrom, vTo, g_ui32Camera); // vTo is taken from the target node
else
fFOV = m_Scene.GetCamera( vFrom, vTo, vUp, g_ui32Camera); // vTo is calculated from the rotation
// We can build the model view matrix from the camera position, target and an up vector.
// For this we use PVRTMat4::LookAtRH()
mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);
// Calculate the projection matrix
bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);
/*
A scene is composed of nodes. There are 3 types of nodes:
- MeshNodes :
references a mesh in the pMesh[].
These nodes are at the beginning of the pNode[] array.
And there are nNumMeshNode number of them.
This way the .pod format can instantiate several times the same mesh
with different attributes.
- lights
- cameras
To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
*/
for (unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
{
SPODNode& Node = m_Scene.pNode[i];
// Get the node model matrix
PVRTMat4 mWorld;
mWorld = m_Scene.GetWorldMatrix(Node);
// Pass the model-view-projection matrix (MVP) to the shader to transform the vertices
PVRTMat4 mModelView, mMVP;
mModelView = mView * mWorld;
mMVP = mProjection * mModelView;
glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);
// Pass the light direction in model space to the shader
PVRTVec4 vLightDir;
vLightDir = mWorld.inverse() * vLightDirection;
PVRTVec3 vLightDirModel = *(PVRTVec3*)&vLightDir;
vLightDirModel.normalize();
glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);
// Load the correct texture using our texture lookup table
GLuint uiTex = 0;
if(Node.nIdxMaterial != -1)
uiTex = m_puiTextureIDs[Node.nIdxMaterial];
glBindTexture(GL_TEXTURE_2D, uiTex);
/*
Now that the model-view matrix is set and the materials are ready,
call another function to actually draw the mesh.
*/
DrawMesh(i);
}
// Display the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
m_Print3D.DisplayDefaultTitle("IntroducingPOD", "", ePVRTPrint3DSDKLogo);
m_Print3D.Flush();
return true;
}
示例3: PVRShellOutputDebug
//==========================================================================================================================================
bool OGLES2Water::GenerateNormalisationCubeMap(int uiTextureSize)
{
// variables
float fOffset = 0.5f;
float fHalfSize = uiTextureSize *0.5f;
PVRTVec3 vTemp;
unsigned char* pByte;
unsigned char* pData = new unsigned char[uiTextureSize*uiTextureSize*3];
if(!pData)
{
PVRShellOutputDebug("Unable to allocate memory for texture data for cube map\n");
return false;
}
// Positive X
pByte = pData;
for(int j = 0; j < uiTextureSize; ++j)
{
for(int i = 0; i < uiTextureSize; ++i)
{
vTemp.x = fHalfSize;
vTemp.y = -(j + fOffset - fHalfSize);
vTemp.z = -(i + fOffset - fHalfSize);
// normalize, pack 0 to 1 here, and normalize again
vTemp = vTemp.normalize() *0.5 + 0.5;
pByte[0] = (unsigned char)(vTemp.x * 255);
pByte[1] = (unsigned char)(vTemp.y * 255);
pByte[2] = (unsigned char)(vTemp.z * 255);
pByte += 3;
}
}
glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData);
// Negative X
pByte = pData;
for(int j = 0; j < uiTextureSize; ++j)
{
for(int i = 0; i < uiTextureSize; ++i)
{
vTemp.x = -fHalfSize;
vTemp.y = -(j + fOffset - fHalfSize);
vTemp.z = (i + fOffset - fHalfSize);
// normalize, pack 0 to 1 here, and normalize again
vTemp = vTemp.normalize() *0.5 + 0.5;
pByte[0] = (unsigned char)(vTemp.x * 255);
pByte[1] = (unsigned char)(vTemp.y * 255);
pByte[2] = (unsigned char)(vTemp.z * 255);
pByte += 3;
}
}
glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData);
// Positive Y
pByte = pData;
for(int j = 0; j < uiTextureSize; ++j)
{
for(int i = 0; i < uiTextureSize; ++i)
{
vTemp.x = i + fOffset - fHalfSize;
vTemp.y = fHalfSize;
vTemp.z = j + fOffset - fHalfSize;
// normalize, pack 0 to 1 here, and normalize again
vTemp = vTemp.normalize() *0.5 + 0.5;
pByte[0] = (unsigned char)(vTemp.x * 255);
pByte[1] = (unsigned char)(vTemp.y * 255);
pByte[2] = (unsigned char)(vTemp.z * 255);
pByte += 3;
}
}
glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData);
// Negative Y
pByte = pData;
for(int j = 0; j < uiTextureSize; ++j)
{
for(int i = 0; i < uiTextureSize; ++i)
{
vTemp.x = i + fOffset - fHalfSize;
vTemp.y = -fHalfSize;
vTemp.z = -(j + fOffset - fHalfSize);
// normalize, pack 0 to 1 here, and normalize again
vTemp = vTemp.normalize() *0.5 + 0.5;
pByte[0] = (unsigned char)(vTemp.x * 255);
//.........这里部分代码省略.........
示例4: ComputeVertexAndNormals
/*!****************************************************************************
 @Function		ComputeVertexAndNormals
 @Input			function	Parametric evaluator mapping (u, v) to (x, y, z)
 @Input			dMinU		Lower bound of the sampled U parameter range
 @Input			dMaxU		Upper bound of the sampled U parameter range
 @Input			dMinV		Lower bound of the sampled V parameter range
 @Input			dMaxV		Upper bound of the sampled V parameter range
 @Description	Samples the parametric surface on an nSampleU x nSampleV grid,
				derives per-vertex UVs and face-based normals, uploads the
				three attribute streams into the VBOs and then releases the
				CPU-side copies (the member pointers are nulled afterwards).
				Assumes nSampleU and nSampleV are both >= 2 -- TODO confirm
				callers guarantee this (the step divisors are nSample? - 1).
******************************************************************************/
void ParametricSurface::ComputeVertexAndNormals(PFUNCTION function, float dMinU, float dMaxU, float dMinV, float dMaxV)
{
	int nVertex = nSampleU * nSampleV;
	pVertex = new float[nVertex*3];
	pNormal = new float[nVertex*3];
	pUV = new float[nVertex*2];
	fMinU = dMinU;
	fMaxU = dMaxU;
	fMinV = dMinV;
	fMaxV = dMaxV;

	// Evaluate the surface function at every grid sample.
	for (int i=0; i<nSampleU; i++)
	{
		for (int j=0; j<nSampleV; j++)
		{
			float u = fMinU + i * (fMaxU-fMinU) / (float)(nSampleU-1);
			float v = fMinV + j * (fMaxV-fMinV) / (float)(nSampleV-1);
			float x,y,z;
			function(u,v, &x,&y,&z);
			pVertex[(j*nSampleU+i)*3 + 0] = x;
			pVertex[(j*nSampleU+i)*3 + 1] = y;
			pVertex[(j*nSampleU+i)*3 + 2] = z;
		}
	}

	// Texture coordinates are simply the normalised grid position.
	for (int i=0; i<nSampleU; i++)
	{
		for (int j=0; j<nSampleV; j++)
		{
			pUV[ (j*nSampleU+i)*2 + 0 ] = (float)i / (float)(nSampleU-1);
			pUV[ (j*nSampleU+i)*2 + 1 ] = (float)j / (float)(nSampleV-1);
		}
	}

	// Per-vertex normal from the cross product of the two grid edges leaving
	// the vertex (A->B along U, A->C along V); negated to match the surface's
	// chosen winding.
	for (int i=0; i<nSampleU-1; i++)
	{
		for (int j=0; j<nSampleV-1; j++)
		{
			PVRTVec3 ptA = PVRTVec3(pVertex[(j*nSampleU+i)*3+0],pVertex[(j*nSampleU+i)*3+1],pVertex[(j*nSampleU+i)*3+2]);
			PVRTVec3 ptB = PVRTVec3(pVertex[(j*nSampleU+i+1)*3+0],pVertex[(j*nSampleU+i+1)*3+1],pVertex[(j*nSampleU+i+1)*3+2]);
			PVRTVec3 ptC = PVRTVec3(pVertex[((j+1)*nSampleU+i)*3+0],pVertex[((j+1)*nSampleU+i)*3+1],pVertex[((j+1)*nSampleU+i)*3+2]);
			PVRTVec3 AB = PVRTVec3(ptB.x-ptA.x, ptB.y-ptA.y, ptB.z-ptA.z);
			PVRTVec3 AC = PVRTVec3(ptC.x-ptA.x, ptC.y-ptA.y, ptC.z-ptA.z);
			PVRTVec3 normal;
			normal = AB.cross(AC);
			normal.normalize();
			pNormal[(j*nSampleU+i)*3 + 0] = -normal.x;
			pNormal[(j*nSampleU+i)*3 + 1] = -normal.y;
			pNormal[(j*nSampleU+i)*3 + 2] = -normal.z;
		}
	}

	// The loops above leave the last row and column unset; copy normals from
	// the opposite edge (presumably the surface wraps -- verify for open
	// surfaces).
	for (int i=0; i<nSampleU-1; i++)
	{
		pNormal[((nSampleV-1)*nSampleU+i)*3+0] = pNormal[(i)*3+0];
		pNormal[((nSampleV-1)*nSampleU+i)*3+1] = pNormal[(i)*3+1];
		pNormal[((nSampleV-1)*nSampleU+i)*3+2] = pNormal[(i)*3+2];
	}
	for (int j=0; j<nSampleV-1; j++)
	{
		pNormal[(j*nSampleU+nSampleU-1)*3+0] = pNormal[(j*nSampleU)*3+0];
		pNormal[(j*nSampleU+nSampleU-1)*3+1] = pNormal[(j*nSampleU)*3+1];
		pNormal[(j*nSampleU+nSampleU-1)*3+2] = pNormal[(j*nSampleU)*3+2];
	}
	// The far corner is covered by neither edge copy: reuse its diagonal
	// neighbour's normal.
	pNormal[((nSampleV-1)*nSampleU + (nSampleU-1))*3+0]= pNormal[((nSampleV-2)*nSampleU + (nSampleU-2))*3+0];
	pNormal[((nSampleV-1)*nSampleU + (nSampleU-1))*3+1]= pNormal[((nSampleV-2)*nSampleU + (nSampleU-2))*3+1];
	pNormal[((nSampleV-1)*nSampleU + (nSampleU-1))*3+2]= pNormal[((nSampleV-2)*nSampleU + (nSampleU-2))*3+2];

	// Insert generated data into vertex buffer objects.
	glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO);
	glBufferData(GL_ARRAY_BUFFER, nVertex * 3 * sizeof (float), pVertex, GL_STATIC_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, iUvVBO);
	glBufferData(GL_ARRAY_BUFFER, nVertex * 2 * sizeof (float), pUV, GL_STATIC_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, iNormalVBO);
	glBufferData(GL_ARRAY_BUFFER, nVertex * 3 * sizeof (float), pNormal, GL_STATIC_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, 0); // Unbind the last buffer used.

	delete[] pVertex;
	delete[] pNormal;
	delete[] pUV;
	// Fix: null the member pointers so later code cannot dereference or
	// double-delete the freed buffers (they previously dangled).
	pVertex = NULL;
	pNormal = NULL;
	pUV = NULL;
}
示例5: UpdateFurShells
/*!****************************************************************************
 @Function		UpdateFurShells
 @Description	Update the fur shells. This is only called when the number of
				shells change. Each shell i is the duck body mesh displaced
				along its world-space normals by an even fraction of
				c_fFurDepth, and the result is uploaded to that shell's VBO.
******************************************************************************/
void OGLESFur::UpdateFurShells()
{
	PVRTVec3 *pvSrcN, *pvSrcV;
	PVRTVec3 vTransNorm;
	PVRTVec4 vTransPos;
	SVertex *pvData;
	int i;
	unsigned int j;
	float fDepth, *pUV;

	int i32MeshIndex = m_Scene.pNode[eDuckBody].nIdx;
	SPODMesh* pMesh = &m_Scene.pMesh[i32MeshIndex];

	// World matrix of the duck body node; used for both positions and normals.
	// (The previous PVRTMat3 copy of it was never read and has been removed.)
	PVRTMat4 mModel;
	m_Scene.GetWorldMatrix(mModel, m_Scene.pNode[eDuckBody]);

	pvData = new SVertex[pMesh->nNumVertex];
	if(!pvData)	// NOTE(review): standard operator new[] throws rather than
		return;	// returning null, so this guard is defensive only.

	for(i = 0; i < m_i32FurShellNo; ++i)
	{
		// Shells are spaced evenly from just above the skin out to c_fFurDepth.
		fDepth = (c_fFurDepth * (float)(i+1) / (float)m_i32FurShellNo);

		for(j = 0; j < pMesh->nNumVertex; ++j)
		{
			// Pointers into the interleaved vertex data for normal, position and UV.
			pvSrcN = (PVRTVec3*) (pMesh->pInterleaved + (size_t) pMesh->sNormals.pData + (j * pMesh->sNormals.nStride));
			pvSrcV = (PVRTVec3*) (pMesh->pInterleaved + (size_t) pMesh->sVertex.pData + (j * pMesh->sVertex.nStride));
			pUV = (float*) (pMesh->pInterleaved + (size_t) pMesh->psUVW[0].pData + (j * pMesh->psUVW[0].nStride));

			// Transform the vertex position so it is in world space
			PVRTVec4 vPos4 = PVRTVec4(*pvSrcV, 1.0f);
			PVRTTransform(&vTransPos, &vPos4, &mModel);

			// Rotate the normal into world space using the upper-left 3x3 of
			// mModel, then renormalise.
			vTransNorm.x = mModel.f[0] * pvSrcN->x + mModel.f[4] * pvSrcN->y + mModel.f[8] * pvSrcN->z;
			vTransNorm.y = mModel.f[1] * pvSrcN->x + mModel.f[5] * pvSrcN->y + mModel.f[9] * pvSrcN->z;
			vTransNorm.z = mModel.f[2] * pvSrcN->x + mModel.f[6] * pvSrcN->y + mModel.f[10]* pvSrcN->z;
			vTransNorm.normalize();

			// Push the vertex out along the normal by this shell's depth and
			// carry the normal and UV through for shading.
			pvData[j].x = vTransPos.x + (vTransNorm.x * fDepth);
			pvData[j].y = vTransPos.y + (vTransNorm.y * fDepth);
			pvData[j].z = vTransPos.z + (vTransNorm.z * fDepth);
			pvData[j].nx = vTransNorm.x;
			pvData[j].ny = vTransNorm.y;
			pvData[j].nz = vTransNorm.z;
			pvData[j].tu = pUV[0];
			pvData[j].tv = pUV[1];
		}

		// Upload this shell's vertex data into its VBO.
		glBindBuffer(GL_ARRAY_BUFFER, m_uiShellVbo[i]);
		unsigned int uiSize = pMesh->nNumVertex * sizeof(SVertex);
		glBufferData(GL_ARRAY_BUFFER, uiSize, pvData, GL_STATIC_DRAW);
		glBindBuffer(GL_ARRAY_BUFFER, 0);
	}

	delete[] pvData;
}