I’m using a POD file with shaders in a PFX file. I’ve got a model of a car to which I have applied a base texture, and I’m trying to apply some decals to the car by blending another texture onto the base. I’m having serious problems though, as I can’t figure out exactly how to do this. I know it’s possible, because the Alpha Blend example uses transparency in the first quad; the problem is that I can’t figure out how to accomplish the same technique using shaders that are defined in the PFX.
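To make the question a bit more concrete, this is roughly the kind of decal pass I have in mind, loosely modelled on the Alpha Blend example: draw the body again with the decal texture bound and alpha blending enabled. It’s only a sketch; _uiDecalTexture, uiDecalFXID and bodyNodeIndex are placeholder names of mine, and the uniform setup that renderMeshes normally does is left out:
// Hypothetical decal pass (sketch only, placeholder names): after the base pass,
// re-draw the body mesh with the decal texture on unit 0 and alpha blending on,
// so the decal's alpha channel decides where it covers the base coat.
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _uiDecalTexture);
glUseProgram(_pFX[uiDecalFXID]->GetProgramHandle());
[self drawMesh:bodyNodeIndex withEffect:_pFX[uiDecalFXID]];
glDisable(GL_BLEND);
I’m not sure whether this kind of two-pass approach or a two-sampler mix inside the PFX fragment shader is the right way to go.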
Here’s how my render mesh and draw mesh are defined:
- (void) renderMeshes:(SPODNode *)pNode withIndex:(int)index andEffect:(GLuint)uiFXID
{
// Gets the node model matrix
PVRTMat4 mWorld;
PVRTMat4 mWorldView;
mWorld = _broncoScene.GetWorldMatrix(pNode);
mWorld = [self sizeTiresAndWheels:index axis:mWorld node:pNode withIndex:index];
mWorld = [self applySuspensionLift:index axis:mWorld];
SPODMaterial* pMaterial = &_broncoScene.pMaterial[pNode->nIdxMaterial];
mWorldView = _viewMatrix * _rotationX * mWorld;
// Set the blend mode
// Based on the info stored in the material by PVRShaman.
// We check whether the blend mode is 'opaque' (ONE, ZERO).
// Otherwise we enable blending and set the corresponding operations.
if (_broncoScene.pMaterial[pNode->nIdxMaterial].eBlendSrcRGB == ePODBlendFunc_ONE && _broncoScene.pMaterial[pNode->nIdxMaterial].eBlendDstRGB == ePODBlendFunc_ZERO)
{
glDisable(GL_BLEND);
}
else
{
glEnable(GL_BLEND);
// glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBlendFunc(_broncoScene.pMaterial[pNode->nIdxMaterial].eBlendSrcRGB, _broncoScene.pMaterial[pNode->nIdxMaterial].eBlendDstRGB);
}
// Now process PVRShaman semantics and set-up the associated uniforms.
const CPVRTArray<SPVRTPFXUniform>& aUniforms = _pFX[uiFXID]->GetUniformArray();
for(unsigned int j = 0; j < aUniforms.GetSize(); ++j)
{
switch(aUniforms[j].nSemantic)
{
case ePVRTPFX_UsWORLDVIEWPROJECTION:
{
PVRTMat4 mWVP;
// Passes the world-view-projection matrix (WVP) to the shader to transform the vertices
mWVP = _projectionMatrix * mWorldView;
glUniformMatrix4fv(aUniforms[j].nLocation, 1, GL_FALSE, mWVP.f);
}
break;
case ePVRTPFX_UsWORLDVIEWIT:
{
PVRTMat4 mWorldViewI, mWorldViewIT;
// Passes the inverse transpose of the world-view matrix to the shader to transform the normals
mWorldViewI = mWorldView.inverse();
mWorldViewIT = mWorldViewI.transpose();
PVRTMat3 WorldViewIT = PVRTMat3(mWorldViewIT);
glUniformMatrix3fv(aUniforms[j].nLocation, 1, GL_FALSE, WorldViewIT.f);
}
break;
case ePVRTPFX_UsWORLDVIEW:
{
// Passes the world-view matrix to the shader
glUniformMatrix4fv(aUniforms[j].nLocation, 1, GL_FALSE, mWorldView.f);
}
break;
case ePVRTPFX_UsVIEWIT:
{
PVRTMat4 mViewI, mViewIT;
// Passes the inverse transpose of the view matrix to the shader to transform the normals
mViewI = _viewMatrix.inverse();
mViewIT= mViewI.transpose();
PVRTMat3 ViewIT = PVRTMat3(mViewIT);
glUniformMatrix3fv(aUniforms[j].nLocation, 1, GL_FALSE, ViewIT.f);
}
break;
case ePVRTPFX_UsLIGHTDIREYE:
{
// Reads the light direction from the scene.
PVRTVec4 vLightDirection;
PVRTVec3 vPos;
vLightDirection = _broncoScene.GetLightDirection(0);
vLightDirection.x = -vLightDirection.x;
vLightDirection.y = -vLightDirection.y;
vLightDirection.z = -vLightDirection.z;
/*
Sets the w component to 0, so when passing it to glLight(), it is
considered as a directional light (as opposed to a spot light).
*/
vLightDirection.w = 0;
// Passes the light direction in eye space to the shader
PVRTVec4 vLightDirectionEyeSpace;
vLightDirectionEyeSpace = _viewMatrix * vLightDirection;
glUniform3f(aUniforms[j].nLocation, vLightDirectionEyeSpace.x, vLightDirectionEyeSpace.y, vLightDirectionEyeSpace.z);
}
break;
case eCUSTOMSEMANTIC_DIFFUSECOLOUR:
{
glUniform4f(aUniforms[j].nLocation, pMaterial->pfMatDiffuse[0], pMaterial->pfMatDiffuse[1], pMaterial->pfMatDiffuse[2], 1.0f);
}
break;
}
}
// Apply the current body colour to the 'diffuseColor' uniform by name
const CGFloat* components = CGColorGetComponents([self bodyColor].CGColor);
glUniform4f(glGetUniformLocation(_pFX[uiFXID]->GetProgramHandle(), "diffuseColor"),components[0], components[1], components[2], 1);
/*
Now that the model-view matrix is set and the materials are ready,
call another function to actually draw the mesh.
*/
[self drawMesh:index withEffect: _pFX[uiFXID]];
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
- (void)drawMesh:(int)i32NodeIndex withEffect:(CPVRTPFXEffect *)pCurrentFX
{
// Get the mesh data from the POD file
int i32MeshIndex = _broncoScene.pNode[i32NodeIndex].nIdx;
SPODMesh* pMesh = &_broncoScene.pMesh[i32MeshIndex];
// bind the VBO for the mesh
glBindBuffer(GL_ARRAY_BUFFER, _puiVbo[i32MeshIndex]);
// bind the index buffer, won't hurt if the handle is 0
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _puiIndexVbo[i32MeshIndex]);
const CPVRTArray<SPVRTPFXUniform>& Uniforms = pCurrentFX->GetUniformArray();
// Set attributes and texture stages
// The vertex data is already loaded into a VBO as 'interleaved data' (e.g. x,y,z,nx,ny,nz,u0,v0,...).
// When using 'interleaved data' the last parameter of glVertexAttribPointer is just a numerical offset (in bytes) into the stream (e.g. 0, 12, 24).
// Our tools use pMesh->sVertex.pData, etc. to store this offset only when using interleaved data.
// If you do not want to use interleaved data you will need to create separate VBOs from pMesh->sVertex.pData, pMesh->sNormals.pData, etc.
// and set the offset (last parameter) to 0 (see the sketch after drawMesh below).
for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
{
switch(Uniforms[j].nSemantic)
{
case ePVRTPFX_UsPOSITION:
{
glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsNORMAL:
{
glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsUV:
{
glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsTEXTURE:
{
// Set the sampler variable to the texture unit/stage 0,1,2,etc.
glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
}
break;
case eCUSTOMSEMANTIC_TANGENT:
{
glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sTangents.nStride, pMesh->sTangents.pData);
glEnableVertexAttribArray(Uniforms[j].nLocation);
}
break;
}
}
// Indexed Triangle list
// Note: if you export your model and want to use this code, please export it as an 'interleaved' triangle list.
glDrawElements(GL_TRIANGLES, pMesh->nNumFaces*3, GL_UNSIGNED_SHORT, 0);
// Safely disable the vertex attribute arrays
for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
{
switch(Uniforms[j].nSemantic)
{
case ePVRTPFX_UsPOSITION:
{
glDisableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsNORMAL:
{
glDisableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case ePVRTPFX_UsUV:
{
glDisableVertexAttribArray(Uniforms[j].nLocation);
}
break;
case eCUSTOMSEMANTIC_TANGENT:
{
glDisableVertexAttribArray(Uniforms[j].nLocation);
}
break;
}
}
// Un-bind our vertex and index buffers.
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
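For completeness, since the comment in drawMesh mentions the non-interleaved alternative, this is how I understand that path would look. It’s only a rough sketch under my own assumptions; _uiPosVbo, _uiUvVbo, posLocation and uvLocation are placeholder names and not part of my project:
// Sketch of the non-interleaved alternative: one VBO per attribute, filled from
// the separate pData arrays, with the final offset parameter of
// glVertexAttribPointer set to 0. (Assumes the POD was exported non-interleaved,
// so each pData pointer holds a real per-attribute array.)
GLuint _uiPosVbo, _uiUvVbo;
glGenBuffers(1, &_uiPosVbo);
glBindBuffer(GL_ARRAY_BUFFER, _uiPosVbo);
glBufferData(GL_ARRAY_BUFFER, pMesh->nNumVertex * pMesh->sVertex.nStride, pMesh->sVertex.pData, GL_STATIC_DRAW);
glGenBuffers(1, &_uiUvVbo);
glBindBuffer(GL_ARRAY_BUFFER, _uiUvVbo);
glBufferData(GL_ARRAY_BUFFER, pMesh->nNumVertex * pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData, GL_STATIC_DRAW);
// At draw time, bind the relevant VBO before each attribute pointer and pass 0 as the offset:
glBindBuffer(GL_ARRAY_BUFFER, _uiPosVbo);
glVertexAttribPointer(posLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, 0);
glEnableVertexAttribArray(posLocation);
glBindBuffer(GL_ARRAY_BUFFER, _uiUvVbo);
glVertexAttribPointer(uvLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, 0);
glEnableVertexAttribArray(uvLocation);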