Tiny optimization - only init the vertex decoder on vertex type change

This commit is contained in:
Henrik Rydgård 2012-12-25 14:09:22 +01:00
parent 9dce8b09a4
commit 7c64f9c61e
2 changed files with 10 additions and 3 deletions

View file

@@ -43,6 +43,7 @@ const GLuint glprim[8] = {
TransformDrawEngine::TransformDrawEngine(ShaderManager *shaderManager)
: numVerts(0),
lastVType(-1),
shaderManager_(shaderManager) {
decoded = new u8[65536 * 48];
decIndex = new u16[65536];
@@ -618,8 +619,13 @@ void TransformDrawEngine::SubmitPrim(void *verts, void *inds, int prim, int vert
indexGen.SetIndex(numVerts);
int indexLowerBound, indexUpperBound;
// First, decode the verts and apply morphing
dec.SetVertexType(gstate.vertType);
// If vtype has changed, setup the vertex decoder.
// TODO: Simply cache the setup decoders instead.
if (gstate.vertType != lastVType) {
dec.SetVertexType(gstate.vertType);
lastVType = gstate.vertType;
}
// Decode the verts and apply morphing
dec.DecodeVerts(decoded + numVerts * (int)dec.GetDecVtxFmt().stride, verts, inds, prim, vertexCount, &indexLowerBound, &indexUpperBound);
numVerts += indexUpperBound - indexLowerBound + 1;

View file

@@ -42,12 +42,13 @@ private:
// Vertex collector buffers
VertexDecoder dec;
u32 lastVType;
u8 *decoded;
u16 *decIndex;
TransformedVertex *transformed;
TransformedVertex *transformedExpanded;
// Other
ShaderManager *shaderManager_;
};