Problem with per-vertex normals

Hello, I’m a complete beginner with OpenGL, so my problem is probably quite simple. I’m trying to get some interleaved vertex data to render, and everything seems fine except the normals, which come out completely messed up (screenshot at the end of the post).
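
For reference, TexturedVertexData3D is the usual interleaved position/normal/texcoord struct; mine looks roughly like this (field names approximate, and the texture coordinates are unused in this test):

    typedef struct {
        GLfloat x, y, z;
    } Vertex3D, Vector3D;

    typedef struct {
        GLfloat red, green, blue, alpha;
    } Color3D; // only used by the per-vertex color data further down

    typedef struct {
        Vertex3D vertex;      // position
        Vector3D normal;      // per-vertex normal
        GLfloat  texCoord[2]; // texture coordinates (unused here)
    } TexturedVertexData3D;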

Here is my code:

    GLuint vao; // declared where the render code can also see it
    glGenVertexArraysOES(1, &vao);
    glBindVertexArrayOES(vao);
    
    // Create and bind a BO for vertex data
    GLuint vbuffer;
    glGenBuffers(1, &vbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vbuffer);    
    // copy data into the buffer object
    glBufferData(GL_ARRAY_BUFFER, sizeof(TexturedVertexData3D)*verticesNum, vertices, GL_STATIC_DRAW);
    
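    // Create and bind a BO for index data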
    GLuint ibuffer;
    glGenBuffers(1, &ibuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibuffer);    
    // copy data into the buffer object
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indicesNum * sizeof(GLubyte), indices, GL_STATIC_DRAW);
    
    
    static const GLfloat color[] = {
        0.8f, 0.8f, 0.8f, 1.0f
    };
    
    glVertexAttrib4fv(GLKVertexAttribColor, color);
    
    glEnableVertexAttribArray(GLKVertexAttribPosition);
    glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(TexturedVertexData3D), (void *)offsetof(TexturedVertexData3D, vertex));
    
    glEnableVertexAttribArray(GLKVertexAttribNormal);
    glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, sizeof(TexturedVertexData3D), (void *)offsetof(TexturedVertexData3D, normal));
    
    // At this point the VAO is set up with two vertex attributes
    // referencing the same buffer object, and another buffer object
    // as source for index data. We can now unbind the VAO, go do
    // something else, and bind it again later when we want to render
    // with it.
    
    glBindVertexArrayOES(0);
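    // NB: attribute enable state is stored per-VAO, so the two disables
    // below act on the default vertex array object, not on 'vao'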
    glDisableVertexAttribArray(GLKVertexAttribPosition);
    glDisableVertexAttribArray(GLKVertexAttribNormal);



// render

    glBindVertexArrayOES(vao);
    glDrawElements(GL_TRIANGLES, indicesNum, GL_UNSIGNED_BYTE, 0);
    
    glBindVertexArrayOES(0);
    glDisableVertexAttribArray(GLKVertexAttribPosition);
    glDisableVertexAttribArray(GLKVertexAttribNormal);

Also, here is the vertex data I use; it should be an icosahedron. I interleave these arrays later, but I’m certain that’s not the problem, so I won’t post that code in full (it needs some clean-up to be readable); a simplified sketch of it follows the data below.

static const Vertex3D verticesData[]= {
    {0, -0.525731, 0.850651},             // vertices[0]
    {0.850651, 0, 0.525731},              // vertices[1]
    {0.850651, 0, -0.525731},             // vertices[2]
    {-0.850651, 0, -0.525731},            // vertices[3]
    {-0.850651, 0, 0.525731},             // vertices[4]
    {-0.525731, 0.850651, 0},             // vertices[5]
    {0.525731, 0.850651, 0},              // vertices[6]
    {0.525731, -0.850651, 0},             // vertices[7]
    {-0.525731, -0.850651, 0},            // vertices[8]
    {0, -0.525731, -0.850651},            // vertices[9]
    {0, 0.525731, -0.850651},             // vertices[10]
    {0, 0.525731, 0.850651}               // vertices[11]
};

static const Color3D colorsData[] = {
    {1.0, 0.0, 0.0, 1.0},
    {1.0, 0.5, 0.0, 1.0},
    {1.0, 1.0, 0.0, 1.0},
    {0.5, 1.0, 0.0, 1.0},
    {0.0, 1.0, 0.0, 1.0},
    {0.0, 1.0, 0.5, 1.0},
    {0.0, 1.0, 1.0, 1.0},
    {0.0, 0.5, 1.0, 1.0},
    {0.0, 0.0, 1.0, 1.0},
    {0.5, 0.0, 1.0, 1.0},
    {1.0, 0.0, 1.0, 1.0},
    {1.0, 0.0, 0.5, 1.0}
};

static const GLubyte icosahedronFacesData[] = {
    1, 2, 6,
    1, 7, 2,
    3, 4, 5,
    4, 3, 8,
    6, 5, 11,
    5, 6, 10,
    9, 10, 2,
    10, 9, 3,
    7, 8, 9,
    8, 7, 0,
    11, 0, 1,
    0, 11, 4,
    6, 2, 10,
    1, 6, 11,
    3, 5, 10,
    5, 4, 11,
    2, 7, 9,
    7, 1, 0,
    3, 9, 8,
    4, 8, 0,
};

static const Vector3D normalsData[] = {
    {0.000000, -0.417775, 0.675974},
    {0.675973, 0.000000, 0.417775},
    {0.675973, -0.000000, -0.417775},
    {-0.675973, 0.000000, -0.417775},
    {-0.675973, -0.000000, 0.417775},
    {-0.417775, 0.675974, 0.000000},
    {0.417775, 0.675973, -0.000000},
    {0.417775, -0.675974, 0.000000},
    {-0.417775, -0.675974, 0.000000},
    {0.000000, -0.417775, -0.675973},
    {0.000000, 0.417775, -0.675974},
    {0.000000, 0.417775, 0.675973},
};
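
For completeness, the interleaving just pairs verticesData[i] with normalsData[i]; a simplified sketch of what I do (not my exact code):

    // Build the interleaved array: one TexturedVertexData3D per vertex,
    // pairing each position with its matching normal (needs <stdlib.h> for malloc)
    const GLsizei verticesNum = sizeof(verticesData) / sizeof(verticesData[0]); // 12
    const GLsizei indicesNum  = sizeof(icosahedronFacesData) / sizeof(GLubyte); // 60
    const GLubyte *indices    = icosahedronFacesData;

    TexturedVertexData3D *vertices = malloc(sizeof(TexturedVertexData3D) * verticesNum);
    for (int i = 0; i < verticesNum; i++) {
        vertices[i].vertex = verticesData[i];
        vertices[i].normal = normalsData[i];
    }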

Lastly, the awful result:

(screenshot)
Any help would be much appreciated, as I’ve been struggling with this for some time now.

OK, I can’t believe how easy it was. I had forgotten glEnable(GL_DEPTH_TEST); :| The problem is fixed and everything works as it should now!
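
For anyone else who hits this: depth testing also needs a depth buffer to test against. With a GLKit setup like mine, the fix looks roughly like this (assuming the view is a GLKView inside a GLKViewController):

    // request a depth buffer for the view; without one, GL_DEPTH_TEST has nothing to test against
    GLKView *view = (GLKView *)self.view;
    view.drawableDepthFormat = GLKViewDrawableDepthFormat24;

    // enable depth testing once during setup
    glEnable(GL_DEPTH_TEST);

    // and clear depth along with color at the start of each frame
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);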