This article collects typical usage examples of the C++ method CPVRTModelPOD::GetWorldMatrix. If you have been wondering what CPVRTModelPOD::GetWorldMatrix does, how to call it, or would like to see it used in real code, the hand-picked examples below should help. You can also explore further usage examples of the containing class, CPVRTModelPOD.
The following presents 15 code examples of the CPVRTModelPOD::GetWorldMatrix method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code samples.
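Before the examples, here is a minimal usage sketch covering the two forms of CPVRTModelPOD::GetWorldMatrix that appear below: the overload that fills a caller-supplied PVRTMat4 for a given SPODNode, and the overload that returns the matrix by value. The helper function, parameter names and the uniform location are hypothetical placeholders, not code from any particular demo.
// Minimal usage sketch (hypothetical helper). 'scene' is a CPVRTModelPOD that has
// already been loaded, e.g. with ReadFromFile().
void DrawNodeSketch(CPVRTModelPOD &scene, unsigned int i, const PVRTMat4 &mViewProjection, GLint uiMVPMatrixLoc)
{
    SPODNode& Node = scene.pNode[i];
    // Overload 1: fill a caller-supplied matrix for the node.
    PVRTMat4 mWorld;
    scene.GetWorldMatrix(mWorld, Node);
    // Overload 2: return the matrix by value (equivalent result).
    PVRTMat4 mWorldByValue = scene.GetWorldMatrix(Node);
    (void) mWorldByValue; // shown only to illustrate the by-value overload
    // Typical use: combine with a view-projection matrix and upload it to the bound shader.
    PVRTMat4 mMVP = mViewProjection * mWorld;
    glUniformMatrix4fv(uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);
}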
Example 1:
/*!****************************************************************************
@Function DrawSceneWithShadow
@Input viewMat The view matrix to use for rendering
@Description Draws the scene with the shadow
******************************************************************************/
void OGLES2ShadowMapping::DrawSceneWithShadow(PVRTMat4 viewMat)
{
for (unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
{
if(i == 1) continue;
SPODNode& Node = m_Scene.pNode[i];
PVRTMat4 mWorld, mModelView;
m_Scene.GetWorldMatrix(mWorld, Node);
PVRTMatrixMultiply(mModelView, mWorld, viewMat);
glUniformMatrix4fv(m_ShadowShaderProgram.uiModelViewMatrixLoc, 1, GL_FALSE, mModelView.f);
// Calculate the light direction for the diffuse lighting
PVRTVec4 vLightDir;
PVRTTransformBack(&vLightDir, &m_vLightDirection, &mWorld);
PVRTVec3 vLightDirModel = *(PVRTVec3*)&vLightDir;
PVRTMatrixVec3Normalize(vLightDirModel, vLightDirModel);
glUniform3fv(m_ShadowShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);
// Load the correct texture using our texture lookup table
GLuint uiTex = 0;
if (Node.nIdxMaterial != -1)
uiTex = m_puiTextureIDs[Node.nIdxMaterial];
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, uiTex);
DrawMesh(i);
}
}
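As an aside, the PVRTTransformBack call above transforms the world-space light direction by the inverse of the node's world matrix; Examples 5 and 13 below achieve the same thing with the PVRTMat4 operators. A minimal equivalent sketch, assuming m_vLightDirection has w == 0 so the translation part of the matrix is ignored:
// Equivalent model-space light direction using matrix operators instead of PVRTTransformBack.
PVRTVec4 vLightDir4 = mWorld.inverse() * m_vLightDirection;
PVRTVec3 vLightDirModel(vLightDir4.x, vLightDir4.y, vLightDir4.z);
vLightDirModel.normalize(); // unit length for the diffuse dot product
glUniform3fv(m_ShadowShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);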
Example 2: DrawModel
/*******************************************************************************
* Function Name : DrawModel
* Inputs : iOptim
* Description : Draws the balloon
*******************************************************************************/
void OGLESOptimizeMesh::DrawModel( int iOptim )
{
SPODMesh *pMesh;
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
PVRTMATRIX worldMatrix;
m_Model.GetWorldMatrix(worldMatrix, m_Model.pNode[0]);
glMultMatrixf(worldMatrix.f);
// Enable States
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
// Set data pointers and bind the VBOs
switch(iOptim)
{
default:
pMesh = m_Model.pMesh;
glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[0]);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[0]);
break;
case 1:
pMesh = m_ModelOpt.pMesh;
glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[1]);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[1]);
break;
}
// Load the meshes material properties
SPODMaterial& Material = m_Model.pMaterial[m_Model.pNode[0].nIdxMaterial];
glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT, PVRTVec4(Material.pfMatAmbient, 1.0f).ptr());
glMaterialfv(GL_FRONT_AND_BACK, GL_DIFFUSE, PVRTVec4(Material.pfMatDiffuse, 1.0f).ptr());
// Used to display interleaved geometry
glVertexPointer(3, GL_FLOAT, pMesh->sVertex.nStride, pMesh->sVertex.pData);
glNormalPointer(GL_FLOAT, pMesh->sNormals.nStride, pMesh->sNormals.pData);
// Draw
glDrawElements(GL_TRIANGLES, pMesh->nNumFaces * 3, GL_UNSIGNED_SHORT, 0);
// Disable States
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
// unbind the vertex buffers as we don't need them bound anymore
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glPopMatrix();
}
Example 3: DrawMesh
/*!****************************************************************************
@Function DrawMesh
@Input i32NodeIndex Node index of the mesh to draw
@Description Draws a SPODMesh.
******************************************************************************/
void OGLESFur::DrawMesh(int i32NodeIndex)
{
PVRTMat4 mWorld;
glPushMatrix();
// Setup the transformation for this mesh
m_Scene.GetWorldMatrix(mWorld, m_Scene.pNode[i32NodeIndex]);
glMultMatrixf(mWorld.f);
// Get the mesh
int ui32MeshID = m_Scene.pNode[i32NodeIndex].nIdx;
SPODMesh* pMesh = &m_Scene.pMesh[ui32MeshID];
// bind the VBO for the mesh
glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[ui32MeshID]);
// bind the index buffer, won't hurt if the handle is 0
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[ui32MeshID]);
glVertexPointer(3, GL_FLOAT, pMesh->sVertex.nStride, pMesh->sVertex.pData);
glNormalPointer(GL_FLOAT, pMesh->sNormals.nStride, pMesh->sNormals.pData);
// Do we have texture co-ordinates
if(pMesh->nNumUVW != 0)
{
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(2, GL_FLOAT, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
}else
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
// Indexed Triangle list
glDrawElements(GL_TRIANGLES, pMesh->nNumFaces * 3, GL_UNSIGNED_SHORT, 0);
// unbind the vertex buffers as we don't need them bound anymore
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glPopMatrix();
}
Example 4: if
/*!***************************************************************************
@Function DrawPODScene
@Input mViewProjection
@Input bDrawCamera
@Description Draws the scene described by the loaded POD file.
*****************************************************************************/
void OGLES3TextureStreaming::DrawPODScene(const PVRTMat4 &mViewProjection)
{
// Clear the colour and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Get the position of the first light from the scene.
PVRTVec4 vLightPosition = m_Scene.GetLightPosition(0);
int iTVCount = 0;
#if defined(__ANDROID__)
// Check if the MVP has changed
if (m_Camera.HasImageChanged() && m_Camera.HasProjectionMatrixChanged())
{
m_TexCoordsProjection = PVRTMat4(m_Camera.GetProjectionMatrix());
}
#endif
for(unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
{
SPODNode& Node = m_Scene.pNode[i];
bool bIsTVScreen = Node.nIdxMaterial == m_uiTVScreen;
bool bIsRecordGlow = Node.nIdxMaterial == m_uiRecordGlow;
// Get the node model matrix
PVRTMat4 mWorld = m_Scene.GetWorldMatrix(Node);
PVRTMat4 mModelView, mMVP;
mMVP = mViewProjection * mWorld;
GLint iMVPLoc = -1;
#if defined(__ANDROID__)
GLint iTexProjLoc = -1;
#endif
if(bIsTVScreen) // If we're drawing the TV screen change to the correct TV shader
{
_ASSERT(iTVCount < c_numTVScreens);
if(c_screenEffects[iTVCount] == eTVNoise)
{
glUseProgram(m_TVNoiseShaderProgram.uiId);
iMVPLoc = m_TVNoiseShaderProgram.uiMVP;
#if defined(__ANDROID__)
iTexProjLoc = m_TVNoiseShaderProgram.uiVideoTexProjM;
#endif
// Do the screen scrolling
float fBandY1 = m_fBandScroll;
float fBandY2 = fBandY1 + c_fBandWidth;
glUniform2f(m_TVNoiseShaderProgram.uiScreenBand, fBandY1, fBandY2);
// Do the noise
PVRTVec2 vNoiseCoords;
vNoiseCoords.x = (m_iNoiseCoordIdx % 4) * 0.25f;
vNoiseCoords.y = (m_iNoiseCoordIdx / 4) * 0.25f;
// Set the texmod value
glUniform2f(m_TVNoiseShaderProgram.uiNoiseLoc, vNoiseCoords.x, vNoiseCoords.y);
// Increment and reset
m_iNoiseCoordIdx++;
if(m_iNoiseCoordIdx >= 16)
m_iNoiseCoordIdx = 0;
}
else if(c_screenEffects[iTVCount] == eTVGreyscale)
{
glUseProgram(m_TVGreyscaleShaderProgram.uiId);
iMVPLoc = m_TVGreyscaleShaderProgram.uiMVP;
#if defined(__ANDROID__)
iTexProjLoc = m_TVGreyscaleShaderProgram.uiVideoTexProjM;
#endif
}
else if(c_screenEffects[iTVCount] == eTVColour)
{
glUseProgram(m_TVShaderProgram.uiId);
iMVPLoc = m_TVShaderProgram.uiMVP;
#if defined(__ANDROID__)
iTexProjLoc = m_TVShaderProgram.uiVideoTexProjM;
#endif
}
else
{
_ASSERT(false); // Invalid enum
}
iTVCount++;
}
else if(bIsRecordGlow)
{
// Should the glow be active?
unsigned long ulNow = PVRShellGetTime();
if(ulNow - m_ulGlowTime > 1000)
{
m_bGlowState = !m_bGlowState;
m_ulGlowTime = ulNow;
}
//......... part of the code omitted here .........
Example 5: RenderScene
/*!****************************************************************************
@Function RenderScene
@Return bool true if no error occurred
@Description Main rendering loop function of the program. The shell will
call this function every frame.
eglSwapBuffers() will be performed by PVRShell automatically.
PVRShell will also manage important OS events.
Will also manage relevant OS events. The user has access to
these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLESPVRScopeExample::RenderScene()
{
// Keyboard input (cursor up/down to cycle through counters)
if(PVRShellIsKeyPressed(PVRShellKeyNameUP))
{
m_i32Counter++;
if(m_i32Counter > (int) m_pScopeGraph->GetCounterNum())
m_i32Counter = m_pScopeGraph->GetCounterNum();
}
if(PVRShellIsKeyPressed(PVRShellKeyNameDOWN))
{
m_i32Counter--;
if(m_i32Counter < 0)
m_i32Counter = 0;
}
if(PVRShellIsKeyPressed(PVRShellKeyNameACTION2))
m_pScopeGraph->ShowCounter(m_i32Counter, !m_pScopeGraph->IsCounterShown(m_i32Counter));
// Keyboard input (cursor left/right to change active group)
if(PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
{
m_pScopeGraph->SetActiveGroup(m_pScopeGraph->GetActiveGroup()+1);
}
if(PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
{
m_pScopeGraph->SetActiveGroup(m_pScopeGraph->GetActiveGroup()-1);
}
// Clears the color and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Loads the projection matrix
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(m_mProjection.f);
// Specify the modelview matrix
PVRTMat4 mModel;
SPODNode& Node = m_Scene.pNode[0];
m_Scene.GetWorldMatrix(mModel, Node);
// Rotate and Translate the model matrix
m_fAngleY += (2*PVRT_PIf/60)/7;
// Set model view projection matrix
PVRTMat4 mModelView;
mModelView = m_mView * PVRTMat4::RotationY(m_fAngleY) * mModel;
glMatrixMode(GL_MODELVIEW);
glLoadMatrixf(mModelView.f);
/*
Load the light direction from the scene if we have one
*/
// Enables lighting. See BasicTnL for a detailed explanation
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
// Set light direction
PVRTVec4 vLightDirModel;
vLightDirModel = mModel.inverse() * PVRTVec4(1, 1, 1, 0);
glLightfv(GL_LIGHT0, GL_POSITION, (float*)&vLightDirModel.x);
// Enable the vertex position attribute array
glEnableClientState(GL_VERTEX_ARRAY);
// bind the texture
glBindTexture(GL_TEXTURE_2D, m_uiTexture);
/*
Now that the model-view matrix is set and the materials are ready,
call another function to actually draw the mesh.
*/
DrawMesh(Node.nIdx);
// Disable the vertex positions
glDisableClientState(GL_VERTEX_ARRAY);
char Description[256];
if(m_pScopeGraph->GetCounterNum())
{
sprintf(Description, "Active Grp %i\n\nCounter %i (Grp %i) \nName: %s\nShown: %s\nuser y-axis: %.2f max: %.2f%s",
m_pScopeGraph->GetActiveGroup(), m_i32Counter,
//......... part of the code omitted here .........
Example 6: cos
/*!****************************************************************************
@Function RenderScene
@Return bool true if no error occurred
@Description Main rendering loop function of the program. The shell will
call this function every frame.
eglSwapBuffers() will be performed by PVRShell automatically.
PVRShell will also manage important OS events.
Will also manage relevant OS events. The user has access to
these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2ShadowMapping::RenderScene()
{
//rotate light position
m_fLightAngle += 0.01f;
m_vLightPosition.x = m_fLightDistance * (float) cos(m_fLightAngle);
m_vLightPosition.z = m_fLightDistance * (float) sin(m_fLightAngle);
m_vLightDirection.x = -m_vLightPosition.x;
m_vLightDirection.z = -m_vLightPosition.z;
SetUpMatrices();
glEnable(GL_DEPTH_TEST);
// Bind the frame buffer object
glBindFramebuffer(GL_FRAMEBUFFER, m_uiFrameBufferObject);
if(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE)
{
// Clear the screen and depth buffer so we can render from the light's view
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Set the current viewport to our texture size
glViewport(0, 0, m_ui32ShadowMapSize, m_ui32ShadowMapSize);
// Since we don't care about colour when rendering the depth values to
// the shadow-map texture, we disable color writing to increase speed.
glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE);
// Enable the simple shader for the light view pass. This render will not be shown to the user
// so only the simplest render needs to be implemented
glUseProgram(m_SimpleShaderProgram.uiId);
// Set the light projection matrix
glUniformMatrix4fv(m_SimpleShaderProgram.uiProjectionMatrixLoc, 1, GL_FALSE, m_LightProjection.f);
// Render the world according to the light's view
DrawScene(m_LightView);
// We can turn color writing back on since we already stored the depth values
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
// Restore our normal viewport size to our screen width and height
glViewport(0, 0,PVRShellGet(prefWidth),PVRShellGet(prefHeight));
}
glBindFramebuffer(GL_FRAMEBUFFER, 0);
// Clear the colour and depth buffers, we are now going to render the scene again from scratch
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Load the shadow shader. This shader requires additional parameters: texProjMatrix for the depth-buffer
// lookup and the light direction for diffuse lighting (the effect is a lot nicer with the addition of the
// diffuse light).
glUseProgram(m_ShadowShaderProgram.uiId);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_uiShadowMapTexture);
glUniformMatrix4fv(m_ShadowShaderProgram.uiProjectionMatrixLoc, 1, GL_FALSE, m_Projection.f);
PVRTMat4 mViewInv, mTextureMatrix, mMatrix;
mViewInv = m_View.inverse();
// We need to calculate the texture projection matrix. This matrix takes positions from the current camera's view space into the light projection space used for the earlier
// render, where we can look up values from our saved depth buffer. It is built by taking the inverse of the current view matrix (back to world space), then applying the
// light's view and projection matrices as used for the shadow pass, and finally a bias matrix that remaps clip coordinates into the [0, 1] texture range.
mTextureMatrix = m_BiasMatrix * m_LightProjection * m_LightView * mViewInv;
glUniformMatrix4fv(m_ShadowShaderProgram.uiTexProjMatrixLoc, 1, GL_FALSE, mTextureMatrix.f);
DrawSceneWithShadow(m_View);
// Re-enable the simple shader to draw the light source object
glUseProgram(m_SimpleShaderProgram.uiId);
SPODNode& Node = m_Scene.pNode[1];
PVRTMat4 mWorld, mModelView;
m_Scene.GetWorldMatrix(mWorld, Node);
mWorld.f[12] = m_vLightPosition.x;
mWorld.f[13] = m_vLightPosition.y;
mWorld.f[14] = m_vLightPosition.z;
mModelView = m_View * mWorld;
glUniformMatrix4fv(m_SimpleShaderProgram.uiModelViewMatrixLoc, 1, GL_FALSE, mModelView.f);
glUniformMatrix4fv(m_SimpleShaderProgram.uiProjectionMatrixLoc, 1, GL_FALSE, m_LightProjection.f);
//......... part of the code omitted here .........
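The bias matrix m_BiasMatrix used above is set up elsewhere in the demo and does not appear in this excerpt. A conventional shadow-map bias matrix remaps clip-space x, y and z from [-1, 1] into the [0, 1] texture/depth range; the sketch below shows that standard form, assuming PVRTMat4's 16-float, column-major constructor (the demo's actual definition may differ).
// Illustration only: the standard 0.5-scale / 0.5-offset bias matrix.
// The translation sits in elements 12-14, matching the column-major layout
// used above (mWorld.f[12..14] hold the world-space translation).
const PVRTMat4 mBias(
    0.5f, 0.0f, 0.0f, 0.0f,
    0.0f, 0.5f, 0.0f, 0.0f,
    0.0f, 0.0f, 0.5f, 0.0f,
    0.5f, 0.5f, 0.5f, 1.0f);
// Combined exactly as in RenderScene() above:
// mTextureMatrix = mBias * m_LightProjection * m_LightView * m_View.inverse();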
Example 7: RenderScene
/*!****************************************************************************
@Function RenderScene
@Return bool true if no error occurred
@Description Main rendering loop function of the program. The shell will
call this function every frame.
eglSwapBuffers() will be performed by PVRShell automatically.
PVRShell will also manage important OS events.
Will also manage relevant OS events. The user has access to
these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLESIntroducingPFX::RenderScene()
{
// Clears the color and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Use the loaded effect
m_pEffect->Activate();
/*
Calculates the frame number to animate in a time-based manner.
Uses the shell function PVRShellGetTime() to get the time in milliseconds.
*/
int iTime = PVRShellGetTime();
int iDeltaTime = iTime - m_iTimePrev;
m_iTimePrev = iTime;
m_fFrame += (float)iDeltaTime * DEMO_FRAME_RATE;
if (m_fFrame > m_Scene.nNumFrame-1)
m_fFrame = 0;
// Sets the scene animation to this frame
m_Scene.SetFrame(m_fFrame);
{
PVRTVec3 vFrom, vTo, vUp;
VERTTYPE fFOV;
vUp.x = 0.0f;
vUp.y = 1.0f;
vUp.z = 0.0f;
// We can get the camera position, target and field of view (fov) with GetCameraPos()
fFOV = m_Scene.GetCameraPos(vFrom, vTo, 0) * 0.4f;
/*
We can build the world view matrix from the camera position, target and an up vector.
For this we use PVRTMat4::LookAtRH().
*/
m_mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);
// Calculates the projection matrix
bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
m_mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), CAM_NEAR, CAM_FAR, PVRTMat4::OGL, bRotate);
}
/*
A scene is composed of nodes. There are 3 types of nodes:
- MeshNodes :
references a mesh in the pMesh[].
These nodes are at the beginning of the pNode[] array.
And there are nNumMeshNode number of them.
This way the .pod format can instantiate the same mesh several times
with different attributes.
- lights
- cameras
To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
*/
for (int i=0; i<(int)m_Scene.nNumMeshNode; i++)
{
SPODNode* pNode = &m_Scene.pNode[i];
// Gets pMesh referenced by the pNode
SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];
glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);
// Gets the node model matrix
PVRTMat4 mWorld;
mWorld = m_Scene.GetWorldMatrix(*pNode);
PVRTMat4 mWorldView;
mWorldView = m_mView * mWorld;
for(unsigned int j = 0; j < m_nUniformCnt; ++j)
{
switch(m_psUniforms[j].nSemantic)
{
case eUsPOSITION:
{
glVertexAttribPointer(m_psUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
glEnableVertexAttribArray(m_psUniforms[j].nLocation);
}
break;
case eUsNORMAL:
{
glVertexAttribPointer(m_psUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
glEnableVertexAttribArray(m_psUniforms[j].nLocation);
}
break;
case eUsUV:
{
glVertexAttribPointer(m_psUniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
//......... part of the code omitted here .........
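The long comment in Example 7 describes how pNode[] is laid out: mesh nodes first, then lights, then cameras. A short sketch of how the later examples index into that layout (Example 9 performs exactly the camera lookup shown here); the trailing + 0 just picks the first light or camera and is only illustrative.
// Mesh nodes occupy indices [0, nNumMeshNode).
SPODNode& MeshNode   = m_Scene.pNode[0];
// Light nodes follow the mesh nodes.
SPODNode& LightNode  = m_Scene.pNode[m_Scene.nNumMeshNode + 0];
// Camera nodes follow the light nodes; .nIdx then selects the entry in pCamera[].
SPODNode& CameraNode = m_Scene.pNode[m_Scene.nNumMeshNode + m_Scene.nNumLight + 0];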
Example 8: UpdateFurShells
/*!****************************************************************************
@Function UpdateFurShells
@Description Update the fur shells. This is only called when the number of
shells change.
******************************************************************************/
void OGLESFur::UpdateFurShells()
{
PVRTVec3 *pvSrcN, *pvSrcV;
PVRTVec3 vTransNorm;
PVRTVec4 vTransPos;
SVertex *pvData;
int i;
unsigned int j;
float fDepth, *pUV;
int i32MeshIndex = m_Scene.pNode[eDuckBody].nIdx;
SPODMesh* pMesh = &m_Scene.pMesh[i32MeshIndex];
PVRTMat4 mModel;
PVRTMat3 mModel3;
m_Scene.GetWorldMatrix(mModel, m_Scene.pNode[eDuckBody]);
mModel3 = PVRTMat3(mModel);
pvData = new SVertex[pMesh->nNumVertex];
if(!pvData)
return;
for(i = 0; i < m_i32FurShellNo; ++i)
{
fDepth = (c_fFurDepth * (float)(i+1) / (float)m_i32FurShellNo);
for(j = 0; j < pMesh->nNumVertex; ++j)
{
pvSrcN = (PVRTVec3*) (pMesh->pInterleaved + (size_t) pMesh->sNormals.pData + (j * pMesh->sNormals.nStride));
pvSrcV = (PVRTVec3*) (pMesh->pInterleaved + (size_t) pMesh->sVertex.pData + (j * pMesh->sVertex.nStride));
pUV = (float*) (pMesh->pInterleaved + (size_t) pMesh->psUVW[0].pData + (j * pMesh->psUVW[0].nStride));
// Transform the vertex position so it is in world space
PVRTVec4 vPos4 = PVRTVec4(*pvSrcV, 1.0f);
PVRTTransform(&vTransPos, &vPos4, &mModel);
// Transform the vertex normal so it is in world space
vTransNorm.x = mModel.f[0] * pvSrcN->x + mModel.f[4] * pvSrcN->y + mModel.f[8] * pvSrcN->z;
vTransNorm.y = mModel.f[1] * pvSrcN->x + mModel.f[5] * pvSrcN->y + mModel.f[9] * pvSrcN->z;
vTransNorm.z = mModel.f[2] * pvSrcN->x + mModel.f[6] * pvSrcN->y + mModel.f[10]* pvSrcN->z;
vTransNorm.normalize();
pvData[j].x = vTransPos.x + (vTransNorm.x * fDepth);
pvData[j].y = vTransPos.y + (vTransNorm.y * fDepth);
pvData[j].z = vTransPos.z + (vTransNorm.z * fDepth);
pvData[j].nx = vTransNorm.x;
pvData[j].ny = vTransNorm.y;
pvData[j].nz = vTransNorm.z;
pvData[j].tu = pUV[0];
pvData[j].tv = pUV[1];
}
glBindBuffer(GL_ARRAY_BUFFER, m_uiShellVbo[i]);
unsigned int uiSize = pMesh->nNumVertex * sizeof(SVertex);
glBufferData(GL_ARRAY_BUFFER, uiSize, pvData, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
delete[] pvData;
}
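Note that Example 8 builds mModel3 = PVRTMat3(mModel) but then rotates each normal with explicit component arithmetic. Assuming PVRTMat3 offers a matrix-vector operator* (as PVRTMat4 does for PVRTVec4 elsewhere on this page), and that the node's transform contains no non-uniform scale (otherwise the inverse-transpose would be needed), the three manual lines could be written as:
// Equivalent to the three component-wise lines above: rotate the source normal
// by the upper-left 3x3 of the model matrix, then renormalise it.
vTransNorm = mModel3 * (*pvSrcN);
vTransNorm.normalize();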
Example 9: glClear
/*!****************************************************************************
@Function RenderScene
@Return bool true if no error occurred
@Description Main rendering loop function of the program. The shell will
call this function every frame.
eglSwapBuffers() will be performed by PVRShell automatically.
PVRShell will also manage important OS events.
Will also manage relevant OS events. The user has access to
these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3PhantomMask::RenderScene()
{
if(PVRShellIsKeyPressed(PVRShellKeyNameACTION1))
m_bEnableSH = !m_bEnableSH;
// Clear the colour and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Draw the background
m_Background.Draw(m_ui32TexBackground);
// Enable culling
glEnable(GL_CULL_FACE);
// Enable depth testing
glEnable(GL_DEPTH_TEST);
// Use shader program
GLuint ProgramID, MVPLoc, ModelLoc;
if(m_bEnableSH)
{
ProgramID = m_SHShaderProgram.uiId;
MVPLoc = m_SHShaderProgram.auiLoc[eSHMVPMatrix];
ModelLoc = m_SHShaderProgram.auiLoc[eSHModel];
}
else
{
ProgramID = m_DiffuseShaderProgram.uiId;
MVPLoc = m_DiffuseShaderProgram.auiLoc[eDifMVPMatrix];
ModelLoc = m_DiffuseShaderProgram.auiLoc[eDifModel];
}
glUseProgram(ProgramID);
/*
Calculates the frame number to animate in a time-based manner.
Uses the shell function PVRShellGetTime() to get the time in milliseconds.
*/
unsigned long ulTime = PVRShellGetTime();
if(ulTime > m_ulTimePrev)
{
unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
m_fFrame += (float)ulDeltaTime * g_fDemoFrameRate;
if(m_fFrame > m_Scene.nNumFrame - 1)
m_fFrame = 0;
// Sets the scene animation to this frame
m_Scene.SetFrame(m_fFrame);
}
m_ulTimePrev = ulTime;
/*
Set up the view and projection matrices from the camera
*/
PVRTMat4 mView, mProjection;
PVRTVec3 vFrom, vTo(0.0f), vUp(0.0f, 1.0f, 0.0f);
float fFOV;
// Setup the camera
bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
// Camera nodes are after the mesh and light nodes in the array
int i32CamID = m_Scene.pNode[m_Scene.nNumMeshNode + m_Scene.nNumLight + g_ui32Camera].nIdx;
// Get the camera position, target and field of view (fov)
if(m_Scene.pCamera[i32CamID].nIdxTarget != -1) // Does the camera have a target?
fFOV = m_Scene.GetCameraPos( vFrom, vTo, g_ui32Camera); // vTo is taken from the target node
else
fFOV = m_Scene.GetCamera( vFrom, vTo, vUp, g_ui32Camera); // vTo is calculated from the rotation
fFOV *= bRotate ? (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight) : (float)PVRShellGet(prefHeight)/(float)PVRShellGet(prefWidth);
// We can build the model view matrix from the camera position, target and an up vector.
// For this we use PVRTMat4::LookAtRH()
mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);
// Calculate the projection matrix
mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);
SPODNode& Node = m_Scene.pNode[0];
// Get the node model matrix
PVRTMat4 mWorld;
mWorld = m_Scene.GetWorldMatrix(Node);
// Set the model inverse transpose matrix
//......... part of the code omitted here .........
Example 10: PVRShellGetTime
//......... part of the code omitted here .........
ChangeSkyboxTo(m_ppEffects[i], m_ui32TextureIDs[4]);
fBurnAnim = 0.0f;
}
if(PVRShellIsKeyPressed(PVRShellKeyNameDOWN))
{
for(i = 0; i < g_ui32NoOfEffects; ++i)
ChangeSkyboxTo(m_ppEffects[i], m_ui32TextureIDs[3]);
fBurnAnim = 0.0f;
}
/* Setup Shader and Shader Constants */
int location;
glDisable(GL_CULL_FACE);
DrawSkybox();
glEnable(GL_CULL_FACE);
m_ppEffects[m_i32Effect]->Activate();
for(i = 0; i < m_Scene.nNumMeshNode; i++)
{
SPODNode* pNode = &m_Scene.pNode[i];
// Gets pMesh referenced by the pNode
SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];
// Gets the node model matrix
PVRTMat4 mWorld, mWORLDVIEW;
mWorld = m_Scene.GetWorldMatrix(*pNode);
mWORLDVIEW = m_mView * mWorld;
glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);
const CPVRTArray<SPVRTPFXUniform>& Uniforms = m_ppEffects[m_i32Effect]->GetUniformArray();
for(j = 0; j < Uniforms.GetSize(); ++j)
{
switch(Uniforms[j].nSemantic)
{
case ePVRTPFX_UsPOSITION:
{
glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsNORMAL:
{
glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsUV:
{
glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsWORLDVIEWPROJECTION:
{
PVRTMat4 mMVP;
//......... part of the code omitted here .........
Example 11: PVRShellGetTime
//......... part of the code omitted here .........
break;
default:
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, m_ui32TexBeltNormalMap);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_ui32TexBelt);
break;
}
DrawSkinnedMesh(i32NodeIndex);
}
// Safely disable the vertex attribute arrays
for(i = 0; i < eNumAttribs; ++i) glDisableVertexAttribArray(i);
// Draw non-skinned meshes
glUseProgram(m_DefaultShaderProgram.uiId);
// Enable the vertex attribute arrays
for(i = 0; i < eNumDefaultAttribs; ++i) glEnableVertexAttribArray(i);
for(unsigned int i32NodeIndex = 3; i32NodeIndex < m_Scene.nNumMeshNode; ++i32NodeIndex)
{
SPODNode& Node = m_Scene.pNode[i32NodeIndex];
SPODMesh& Mesh = m_Scene.pMesh[Node.nIdx];
// bind the VBO for the mesh
glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[Node.nIdx]);
// bind the index buffer, won't hurt if the handle is 0
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[Node.nIdx]);
// Get the node model matrix
PVRTMat4 mWorld;
mWorld = m_Scene.GetWorldMatrix(Node);
// Setup the appropriate texture and transformation (if needed)
switch(i32NodeIndex)
{
case eWall:
glBindTexture(GL_TEXTURE_2D, m_ui32TexWall);
// Rotate the wall mesh which is circular
mWorld *= PVRTMat4::RotationY(m_fWallPos);
glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], 0);
break;
case eBackground:
glBindTexture(GL_TEXTURE_2D, m_ui32TexSkyLine);
glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], m_fBackgroundPos);
break;
case eLights:
{
glBindTexture(GL_TEXTURE_2D, m_ui32TexLamp);
PVRTMat4 mWallWorld = m_Scene.GetWorldMatrix(m_Scene.pNode[eWall]);
mWorld = mWallWorld * PVRTMat4::RotationY(m_fWallPos) * mWallWorld.inverse() * mWorld;
glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], 0);
}
break;
default:
break;
};
// Set up shader uniforms
PVRTMat4 mModelViewProj;
mModelViewProj = mViewProjection * mWorld;
glUniformMatrix4fv(m_DefaultShaderProgram.auiLoc[eDefaultMVPMatrix], 1, GL_FALSE, mModelViewProj.ptr());
// Set the vertex attribute offsets
glVertexAttribPointer(DEFAULT_VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sVertex.nStride, Mesh.sVertex.pData);
glVertexAttribPointer(DEFAULT_TEXCOORD_ARRAY, 2, GL_FLOAT, GL_FALSE, Mesh.psUVW[0].nStride, Mesh.psUVW[0].pData);
// Indexed Triangle list
glDrawElements(GL_TRIANGLES, Mesh.nNumFaces*3, GL_UNSIGNED_SHORT, 0);
}
// Safely disable the vertex attribute arrays
for(i = 0; i < eNumAttribs; ++i) glDisableVertexAttribArray(i);
// unbind the VBOs
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
// Display the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
const char * pDescription;
if(m_bEnableDOT3)
pDescription = "Skinning with DOT3 Per Pixel Lighting";
else
pDescription = "Skinning with Vertex Lighting";
m_Print3D.DisplayDefaultTitle("Chameleon Man", pDescription, ePVRTPrint3DSDKLogo);
m_Print3D.Flush();
return true;
}
Example 12: hashedName
/*!****************************************************************************
@Function RenderSceneWithEffect
@Return bool true if no error occurred
@Description Renders the whole scene with a single effect.
******************************************************************************/
bool OGLES3ShadowMapping::RenderSceneWithEffect(const int uiEffectId, const PVRTMat4 &mProjection, const PVRTMat4 &mView)
{
CPVRTPFXEffect *pEffect = m_ppPFXEffects[uiEffectId];
// Activate the passed effect
pEffect->Activate();
for (unsigned int i=0; i < m_Scene.nNumMeshNode; i++)
{
SPODNode* pNode = &m_Scene.pNode[i];
SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];
SPODMaterial *pMaterial = 0;
if (pNode->nIdxMaterial != -1)
{
pMaterial = &m_Scene.pMaterial[pNode->nIdxMaterial];
// Bind the texture if there is one bound to this object
if (pMaterial->nIdxTexDiffuse != -1)
{
CPVRTString texname = CPVRTString(m_Scene.pTexture[pMaterial->nIdxTexDiffuse].pszName).substitute(".png", "");
CPVRTStringHash hashedName(texname);
if (m_TextureCache.Exists(hashedName))
glBindTexture(GL_TEXTURE_2D, m_TextureCache[hashedName]);
}
}
glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[i]);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[i]);
// Pre-calculate commonly used matrices
PVRTMat4 mWorld;
m_Scene.GetWorldMatrix(mWorld, *pNode);
PVRTMat4 mWorldView = mView * mWorld;
// Bind semantics
const CPVRTArray<SPVRTPFXUniform>& Uniforms = pEffect->GetUniformArray();
for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
{
switch(Uniforms[j].nSemantic)
{
case ePVRTPFX_UsPOSITION:
{
glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsNORMAL:
{
glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsUV:
{
glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsMATERIALCOLORDIFFUSE:
{
if (pMaterial)
glUniform4f(Uniforms[j].nLocation, pMaterial->pfMatDiffuse[0], pMaterial->pfMatDiffuse[1], pMaterial->pfMatDiffuse[2], 1.0f);
}
break;
case ePVRTPFX_UsWORLDVIEWPROJECTION:
{
PVRTMat4 mWorldViewProj = mProjection * mWorldView;
glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldViewProj.f);
}
break;
case ePVRTPFX_UsWORLDI:
{
PVRTMat3 mWorldI3x3(mWorld.inverse());
glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldI3x3.f);
}
break;
case ePVRTPFX_UsWORLDVIEWIT:
{
PVRTMat3 mWorldViewIT3x3(mWorldView.inverse().transpose());
glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldViewIT3x3.f);
}
break;
case ePVRTPFX_UsTEXTURE:
{
// Set the sampler variable to the texture unit
glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
}
break;
case ePVRTPFX_UsLIGHTPOSWORLD:
{
glUniform3fv(Uniforms[j].nLocation, 1, m_vLightPosition.ptr());
}
break;
case eCUSTOMSEMANTIC_SHADOWTRANSMATRIX:
//......... part of the code omitted here .........
Example 13: RenderScene
/*!****************************************************************************
@Function RenderScene
@Return bool true if no error occurred
@Description Main rendering loop function of the program. The shell will
call this function every frame.
eglSwapBuffers() will be performed by PVRShell automatically.
PVRShell will also manage important OS events.
Will also manage relevant OS events. The user has access to
these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLESPVRScopeRemote::RenderScene()
{
CPPLProcessingScoped PPLProcessingScoped(m_psSPSCommsData,
__FUNCTION__, static_cast<unsigned int>(strlen(__FUNCTION__)), m_i32FrameCounter);
if(m_psSPSCommsData)
{
// mark every N frames
if(!(m_i32FrameCounter % 100))
{
char buf[128];
const int nLen = sprintf(buf, "frame %u", m_i32FrameCounter);
m_bCommsError |= !pplSendMark(m_psSPSCommsData, buf, nLen);
}
// Check for dirty items
m_bCommsError |= !pplSendProcessingBegin(m_psSPSCommsData, "dirty", static_cast<unsigned int>(strlen("dirty")), m_i32FrameCounter);
{
unsigned int nItem, nNewDataLen;
const char *pData;
while(pplLibraryDirtyGetFirst(m_psSPSCommsData, &nItem, &nNewDataLen, &pData))
{
PVRShellOutputDebug("dirty item %u %u 0x%08x\n", nItem, nNewDataLen, pData);
switch(nItem)
{
case 0:
if(nNewDataLen == sizeof(SSPSCommsLibraryTypeFloat))
{
const SSPSCommsLibraryTypeFloat * const psData = (SSPSCommsLibraryTypeFloat*)pData;
m_fMinThickness = psData->fCurrent;
}
break;
case 1:
if(nNewDataLen == sizeof(SSPSCommsLibraryTypeFloat))
{
const SSPSCommsLibraryTypeFloat * const psData = (SSPSCommsLibraryTypeFloat*)pData;
m_fMaxVariation = psData->fCurrent;
}
break;
}
}
}
m_bCommsError |= !pplSendProcessingEnd(m_psSPSCommsData);
}
if (m_psSPSCommsData)
{
m_bCommsError |= !pplSendProcessingBegin(m_psSPSCommsData, "draw", static_cast<unsigned int>(strlen("draw")), m_i32FrameCounter);
}
// Clear the color and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Loads the projection matrix
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(m_mProjection.f);
// Specify the modelview matrix
PVRTMat4 mModel;
SPODNode& Node = m_Scene.pNode[0];
m_Scene.GetWorldMatrix(mModel, Node);
// Rotate and Translate the model matrix
m_fAngleY += (2*PVRT_PIf/60)/7;
// Set model view projection matrix
PVRTMat4 mModelView;
mModelView = m_mView * PVRTMat4::RotationY(m_fAngleY) * mModel;
glMatrixMode(GL_MODELVIEW);
glLoadMatrixf(mModelView.f);
/*
Load the light direction from the scene if we have one
*/
// Enables lighting. See BasicTnL for a detailed explanation
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
// Set light direction
PVRTVec4 vLightDirModel;
vLightDirModel = mModel.inverse() * PVRTVec4(1, 1, 1, 0);
glLightfv(GL_LIGHT0, GL_POSITION, (float*)&vLightDirModel.x);
// Enable the vertex position attribute array
glEnableClientState(GL_VERTEX_ARRAY);
// bind the texture
//......... part of the code omitted here .........
Example 14: if
void OGLES2FilmTV::DrawPODScene(PVRTMat4 &mViewProjection, bool bDrawCamera)
{
// Clear the colour and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Get the position of the first light from the scene.
PVRTVec4 vLightPosition = m_Scene.GetLightPosition(0);
for(unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
{
SPODNode& Node = m_Scene.pNode[i];
// Get the node model matrix
PVRTMat4 mWorld = m_Scene.GetWorldMatrix(Node);
if(i == g_ui32CameraMesh)
{
if(!bDrawCamera)
continue;
// Rotate camera model
mWorld = m_MiniCamView.inverse() * mWorld;
}
else if(i == g_ui32TvScreen) // If we're drawing the TV screen change to the black and white shader
{
glUseProgram(m_BWShaderProgram.uiId);
}
// Pass the model-view-projection matrix (MVP) to the shader to transform the vertices
PVRTMat4 mModelView, mMVP;
mMVP = mViewProjection * mWorld;
glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);
// Pass the light position in model space to the shader
PVRTVec4 vLightPos;
vLightPos = mWorld.inverse() * vLightPosition;
glUniform3fv(m_ShaderProgram.uiLightPosLoc, 1, &vLightPos.x);
// Load the correct texture using our texture lookup table
GLuint uiTex = 0;
if(Node.nIdxMaterial != -1)
{
if(m_bFBOsCreated && Node.nIdxMaterial == m_uiTVScreen && m_i32Frame != 0)
uiTex = m_uiTexture[1 - m_i32CurrentFBO];
else
uiTex = m_puiTextureIDs[Node.nIdxMaterial];
}
glBindTexture(GL_TEXTURE_2D, uiTex);
/*
Now that the model-view matrix is set and the materials ready,
call another function to actually draw the mesh.
*/
DrawMesh(Node.nIdx);
if(i == g_ui32TvScreen)
{
// Change back to the normal shader after drawing the g_ui32TvScreen
glUseProgram(m_ShaderProgram.uiId);
}
}
}
Example 15: DrawModel
/*******************************************************************************
* Function Name : DrawModel
* Description : Draws the model
*******************************************************************************/
void OGLESSkinning::DrawModel()
{
//Set the frame number
m_Scene.SetFrame(m_fFrame);
// Enable lighting
glEnable(GL_LIGHTING);
// Enable States
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
//Iterate through all the mesh nodes in the scene
for(int iNode = 0; iNode < (int)m_Scene.nNumMeshNode; ++iNode)
{
//Get the mesh node.
SPODNode* pNode = &m_Scene.pNode[iNode];
//Get the mesh that the mesh node uses.
SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];
// bind the VBO for the mesh
glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[pNode->nIdx]);
// bind the index buffer, won't hurt if the handle is 0
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[pNode->nIdx]);
// Loads the correct texture using our texture lookup table
if(pNode->nIdxMaterial == -1)
glBindTexture(GL_TEXTURE_2D, 0); // It has no pMaterial defined. Use blank texture (0)
else
glBindTexture(GL_TEXTURE_2D, m_puiTextures[pNode->nIdxMaterial]);
//If the mesh has bone weight data then we must be skinning.
bool bSkinning = pMesh->sBoneWeight.n != 0;
if(bSkinning)
{
//If we are skinning then enable the relevant states.
glEnableClientState(GL_MATRIX_INDEX_ARRAY_OES);
glEnableClientState(GL_WEIGHT_ARRAY_OES);
}
else
{
// If we're not using matrix palette then get the world matrix for the mesh
// and transform the model view matrix by it.
PVRTMat4 worldMatrix;
m_Scene.GetWorldMatrix(worldMatrix, *pNode);
//Push the modelview matrix
glPushMatrix();
glMultMatrixf(worldMatrix.f);
}
// Set Data Pointers
// Used to display non interleaved geometry
glVertexPointer(pMesh->sVertex.n, GL_FLOAT, pMesh->sVertex.nStride, pMesh->sVertex.pData);
glNormalPointer(GL_FLOAT, pMesh->sNormals.nStride, pMesh->sNormals.pData);
glTexCoordPointer(pMesh->psUVW[0].n, GL_FLOAT, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
if(bSkinning)
{
//Set up the indexes into the matrix palette.
m_Extensions.glMatrixIndexPointerOES(pMesh->sBoneIdx.n, GL_UNSIGNED_BYTE, pMesh->sBoneIdx.nStride, pMesh->sBoneIdx.pData);
m_Extensions.glWeightPointerOES(pMesh->sBoneWeight.n, GL_FLOAT, pMesh->sBoneWeight.nStride, pMesh->sBoneWeight.pData);
}
// Draw
/*
Two variables used by the indexed triangle-strip version of the drawing code. That version is included as an
example but goes unused, since the supplied .pod file contains indexed triangle-list data.
*/
int i32Strip = 0;
int i32Offset = 0;
/*
As we are using bone batching we don't want to draw all the faces contained within pMesh at once; we only
want to draw the ones that are in the current batch. To do this we loop through the batches and pass to
the draw call the offset to the start of the current batch of triangles
(pMesh->sBoneBatches.pnBatchOffset[i32Batch]) and the total number of triangles to draw (i32Tris)
*/
for(int i32Batch = 0; i32Batch < (bSkinning ? pMesh->sBoneBatches.nBatchCnt : 1); ++i32Batch)
{
// If the mesh is used for skining then set up the matrix palettes.
if(bSkinning)
{
//Enable the matrix palette extension
glEnable(GL_MATRIX_PALETTE_OES);
/*
Enables the matrix palette stack extension, and apply subsequent
matrix operations to the matrix palette stack.
*/
//......... part of the code omitted here .........
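The listing stops before the body of the batch loop, which the page has omitted. For orientation only, here is a hedged sketch of how a per-batch indexed triangle-list draw is typically issued once the batch's bone matrices have been loaded into the matrix palette. The field names follow pMesh->sBoneBatches as referenced in the comment above, but this is not the demo's omitted code and the real statements may differ.
// Hypothetical continuation (skinning path), not taken from the demo:
// work out how many triangles belong to this batch from the offset table,
// then draw only that range of 16-bit indices from the bound index buffer.
int i32Tris;
if(i32Batch + 1 < pMesh->sBoneBatches.nBatchCnt)
    i32Tris = pMesh->sBoneBatches.pnBatchOffset[i32Batch + 1] - pMesh->sBoneBatches.pnBatchOffset[i32Batch];
else
    i32Tris = pMesh->nNumFaces - pMesh->sBoneBatches.pnBatchOffset[i32Batch];
size_t uiOffset = 3 * sizeof(GLushort) * pMesh->sBoneBatches.pnBatchOffset[i32Batch]; // byte offset: 3 indices per triangle
glDrawElements(GL_TRIANGLES, i32Tris * 3, GL_UNSIGNED_SHORT, (const void*) uiOffset);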