Hi all.
Having completed my D3D renderers for my engine, I turned my attention to OpenGL. I say that because it is relevant a bit later on when I set up the various matrices.
So, I’ve completed all the code that I thought I needed, but I’m getting a black screen, so I must be missing something. I check all my errors, but nothing is coming up. This is for the windows platform, and I’m not using GLEW or GLUT or any of that kind of stuff, just core functionality here.
I’m taking out all the error checking in this code to keep it as brief as possible. Can someone take a look through it and see if they see any issues?
I will start after I have set up the context. I think it’s working fine up to that point (BTW, I print out the OGL version both after setting up a dummy context and then setting up the real context, and I get 4.0.0 first, then 3.3.0 second (which is correct, I request a 3.3.0 context from wglChoosePixelFormatARB()).
The test geometry vertex and index data:
//geometry data
GLfloat BoxVerts[] = {(-0.6f, 0.8f, -0.6f), (0.6f, 0.8f, -0.6f), (0.6f, 0.8f, 0.6f), (-0.6f, 0.8f, 0.6f),
(-0.8f, -0.8f, -0.8f), (0.8f, -0.8f, -0.8f), (0.8f, -0.8f, 0.8f), (-0.8f, -0.8f, 0.8f)};
GLfloat BoxColors[] = {(1.0f, 1.0f, 1.0f), (0.0f, 1.0f, 1.0f), (1.0f, 0.0f, 1.0f), (1.0f, 1.0f, 0.0f),
(0.0f, 0.0f, 1.0f), (0.0f, 1.0f, 0.0f), (1.0f, 0.0f, 0.0f), (0.0f, 1.0f, 0.0f)};
unsigned int BoxIndices[] = {3,1,0, 2,1,3, 0,5,4, 1,5,0, 3,4,7, 0,4,3, 1,6,5, 2,6,1, 2,7,6, 3,7,2, 6,4,5, 7,4,6};
The shader setup
//create the shader objects, compile and link them
GLuint VertexShader;
GLuint FragmentShader;
GLuint ShaderProgram;
char *Log;
GLint Size;
VertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(VertexShader, 1, &vString, NULL);
glCompileShader(VertexShader);
glGetShaderiv(VertexShader, GL_INFO_LOG_LENGTH, &Size);
Log = new char[Size];
glGetShaderInfoLog(VertexShader, Size, nullptr, Log);
OutputDebugStringA(Log);
delete Log;
FragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(FragmentShader, 1, &fString, NULL);
glCompileShader(FragmentShader);
glGetShaderiv(FragmentShader, GL_INFO_LOG_LENGTH, &Size);
Log = new char[Size];
glGetShaderInfoLog(FragmentShader, Size, nullptr, Log);
OutputDebugStringA(Log);
delete Log;
//create the program
ShaderProgram = glCreateProgram();
glAttachShader(ShaderProgram, VertexShader);
glAttachShader(ShaderProgram, FragmentShader);
//link and use the program
glLinkProgram(ShaderProgram);
glUseProgram(ShaderProgram);
Setting up the attribute buffers
//find the attribute locations
GLint PositionLocation;
GLint ColorLocation;
PositionLocation = glGetAttribLocation(ShaderProgram, "Position");
ColorLocation = glGetAttribLocation(ShaderProgram, "Color");
//create and bind a vertex array object
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
//create the attribute buffers
GLuint AttributeBuffer;
glGenBuffers(1, &AttributeBuffer);
glBindBuffer(GL_ARRAY_BUFFER, AttributeBuffer);
//fill the buffer
glBufferData(GL_ARRAY_BUFFER, sizeof(BoxVerts) + sizeof(BoxColors), nullptr, GL_STATIC_DRAW);
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(BoxVerts), BoxVerts);
glBufferSubData(GL_ARRAY_BUFFER, sizeof(BoxVerts), sizeof(BoxColors), BoxColors);
//relate the buffer data to the attributes in the shaders
glEnableVertexAttribArray(PositionLocation);
glVertexAttribPointer(PositionLocation, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid *)0);
glEnableVertexAttribArray(ColorLocation);
glVertexAttribPointer(ColorLocation, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid *)sizeof(BoxVerts));
//create the index buffer
GLuint IndexBuffer;
glGenBuffers(1, &IndexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IndexBuffer);
//fill the buffer
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(BoxIndices), BoxIndices, GL_STATIC_DRAW);
setting the uniforms
//set the matrices
GLuint UniformWorldMatrix;
GLuint UniformViewMatrix;
GLuint UniformProjectionMatrix;
float WorldMatrix[] = {0.707f, 0.0f, 0.707f, 0.0f,
0.0f, 1.0f, 0.0f, 0.0f,
-0.707f, 0.0f, 0.707f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f};
float ViewMatrix[] = {1.0f, 0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f, 10.0f,
0.0f, 0.0f, 0.0f, 1.0f};
float ProjectionMatrix[] = {4.0f, 0.0f, 0.0f, 0.0f,
0.0f, 4.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f, -1.0f,
0.0f, 0.0f, 1.0f, 0.0f};
UniformWorldMatrix = glGetUniformLocation(ShaderProgram, "WorldMatrix");
UniformViewMatrix = glGetUniformLocation(ShaderProgram, "ViewMatrix");
UniformProjectionMatrix = glGetUniformLocation(ShaderProgram, "ProjectionMatrix");
glUniformMatrix4fv(UniformWorldMatrix, 1, GL_FALSE, WorldMatrix);
glUniformMatrix4fv(UniformViewMatrix, 1, GL_FALSE, ViewMatrix);
glUniformMatrix4fv(UniformProjectionMatrix, 1, GL_FALSE, ProjectionMatrix);
Windows code
//pop up the window
ShowWindow(ControlWindowHandle, true);
while (true)
{
if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
{
if (msg.message == WM_QUIT)
{
break;
}
TranslateMessage(&msg);
DispatchMessage(&msg);
}
else
{
Sleep(1); //do not max out processor
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_INT, (GLvoid*)((char*)NULL));
}
}
return 0;
}
[EDIT: I broke up the wall of code a bit for easier reading. I also changed the last parameter of glDrawElements to 0 (I’ve seen conflicting reports on what to put in here, but I think 0 is correct, let me know if that’s wrong).]
Okay, a lot of code, I know, but hopefully it’s readable.
Alright, the hard coded matrices are the same ones I use from my D3D renderer (they are not hard coded there, but the values I use here are what they end up being). It occurs to me that this may not be right (there are handedness differences between OGL and D3D, right? Which would change the perspective matrix?). I can mess with that (although if anyone has a link somewhere that explains the differences and how they affect the matrices, that would be good).
The values for the box vertices/indeces are the same as from D3D, so I’m not concerned those are wrong.
The shaders are as such:
const char *vString = " \
#version 330
\
\
in vec3 Position; \
in vec3 Color; \
out vec3 FragColor; \
uniform mat4 WorldMatrix; \
uniform mat4 ViewMatrix; \
uniform mat4 ProjectionMatrix; \
\
void main () \
{ \
vec4 Vertex = vec4(Position, 1.0); \
gl_Position = (ProjectionMatrix * (WorldMatrix * ViewMatrix)) * Vertex; \
FragColor = Color; \
}";
const char *fString = " \
#version 330
\
\
in vec3 FragColor; \
out vec4 OutColor; \
\
void main () \
{ \
OutColor = vec4(FragColor, 1.0); \
}";
So, does anyone see what it is that I’m missing or messing up?