If my vertex attribute struct looks like this:
struct AttribData {
float position[3]; // 3 floats * 4 bytes = 12 bytes
float normal[3]; // 3 floats * 4 bytes = 12 bytes
}; // So a total of 24 bytes!
Any my VBO’s look like this:
GLuint indexBuffer;
glCreateBuffers ( 1, &indexBuffer );
glBindBuffer ( GL_ELEMENT_ARRAY_BUFFER, indexBuffer );
glBufferData ( GL_ELEMENT_ARRAY_BUFFER, shapes[0].mesh.indices.size() * sizeof(GLuint), shapes[0].mesh.indices.data (), GL_STATIC_DRAW );
// Model buffers
GLuint buffer;
glCreateBuffers ( 1, &buffer ); // Create a new VBO and use the variable to store the VBO id
glBindBuffer ( GL_ARRAY_BUFFER, buffer ); // Make the new VBO active
glBufferData ( GL_ARRAY_BUFFER, objectData.size() * sizeof(AttribData), &objectData[0], GL_STATIC_DRAW ); // Upload the vertex data to the video device
And my vertex VAO looks like this:
// Create a new VAO and use the variable to store the VAO id
GLuint vertexArray;
glCreateVertexArrays ( 1, &vertexArray );
// Setup the formats
glVertexArrayAttribFormat ( vertexArray, positionLocation, 3, GL_FLOAT, GL_FALSE, 0 );
glVertexArrayAttribFormat ( vertexArray, normalLocation, 3, GL_FLOAT, GL_FALSE, 12 );
// Setup the buffer sources
glVertexArrayElementBuffer ( vertexArray, indexBuffer );
glVertexArrayVertexBuffer ( vertexArray, positionLocation, buffer, 0, 24 ); // Start at byte 0 in the buffer, next set of positions are 24 bytes later
glVertexArrayVertexBuffer ( vertexArray, normalLocation, buffer, 12, 24 ); // Start at byte 12 in the buffer, next set of normals are 24 bytes later
// Link
glVertexArrayAttribBinding ( vertexArray, positionLocation, 0 );
glVertexArrayAttribBinding ( vertexArray, normalLocation, 1 );
// Enable
glEnableVertexArrayAttrib ( vertexArray, positionLocation );
glEnableVertexArrayAttrib ( vertexArray, normalLocation );
Then why does my monkey head look like it fell out of the ugly tree and hit every branch on the way down? Pic: http://screencast.com/t/hRHsAtwr