Drawing indices in the wrong order

Hello,

I have an issue with my OpenGL backend: since I replaced glDrawArrays with glDrawElements, the vertices seem to be drawn in the wrong order. I spent a few hours debugging, trying to locate the issue, and I still can’t spot anything:

Normal rendering:
[screenshot attached]
Wireframe:
[screenshot attached]

This is the default mesh (a box) from the asset manager (I use an Assimp-based loader, which has the same problem):

  static const std::vector<glm::vec3> vertices = {
    {-1.0f, -1.0f, 1.0f}, {1.0f, -1.0f, 1.0f}, {1.0f, 1.0f, 1.0f}, {-1.0f, 1.0f, 1.0f},
    {-1.0f, -1.0f, -1.0f}, {1.0f, -1.0f, -1.0f}, {1.0f, 1.0f, -1.0f}, {-1.0f, 1.0f, -1.0f}
  };

  static const std::vector<glm::vec3> normals = {
    {-1.0f, -1.0f, 1.0f}, {1.0f, -1.0f, 1.0f}, {1.0f, 1.0f, 1.0f}, {-1.0f, 1.0f, 1.0f},
    {-1.0f, -1.0f, -1.0f}, {1.0f, -1.0f, -1.0f},{1.0f, 1.0f, -1.0f}, {-1.0f, 1.0f, -1.0f}
  };

  static const std::vector<unsigned> indices = {
    0, 1, 2, 2, 3, 0, 3, 2, 6, 6, 7, 3, 7, 6, 5, 5, 4, 7,
    4, 0, 3, 3, 7, 4, 0, 1, 5, 5, 4, 0, 1, 5, 6, 6, 2, 1
  };

  m_meshes["Default"] = std::make_shared<Mesh>(vertices, normals, indices);

The data are uploaded to the GPU in the mesh class:

  SDL_assert(vertices.size() == normals.size());

  // Interleaved layout: position followed by normal, six GLfloats per vertex
  struct VertexData
  {
    GLfloat vertex[3];
    GLfloat normal[3];
  };

  std::vector<VertexData> verticesData(vertices.size());

  for (size_t i = 0; i < vertices.size(); ++i)
  {
    verticesData[i].vertex[0] = vertices[i].x;
    verticesData[i].vertex[1] = vertices[i].y;
    verticesData[i].vertex[2] = vertices[i].z;

    verticesData[i].normal[0] = normals[i].x;
    verticesData[i].normal[1] = normals[i].y;
    verticesData[i].normal[2] = normals[i].z;
  }

  m_elementsCount = indices.size();

  glGenVertexArrays(1, &m_vertexArray);
  SDL_assert(glGetError() == GL_NO_ERROR);

  glGenBuffers(1, &m_vertexBuffer);
  SDL_assert(glGetError() == GL_NO_ERROR);

  glGenBuffers(1, &m_arrayElements);
  SDL_assert(glGetError() == GL_NO_ERROR);

  glBindVertexArray(m_vertexArray);
  SDL_assert(glGetError() == GL_NO_ERROR);
  {
    // Vertices and normals
    glBindBuffer(GL_ARRAY_BUFFER, m_vertexBuffer);
    SDL_assert(glGetError() == GL_NO_ERROR);
    {
      glBufferData(GL_ARRAY_BUFFER, verticesData.size() * sizeof(verticesData[0]), verticesData.data(), GL_STATIC_DRAW);
      SDL_assert(glGetError() == GL_NO_ERROR);
    }

    // Configure the buffer layout for the vertices
    glEnableVertexAttribArray(0);
    SDL_assert(glGetError() == GL_NO_ERROR);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(verticesData), reinterpret_cast<void *>(offsetof(VertexData, vertex)));
    SDL_assert(glGetError() == GL_NO_ERROR);

    // Configure the buffer layout for the normals
    glEnableVertexAttribArray(1);
    SDL_assert(glGetError() == GL_NO_ERROR);
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(verticesData), reinterpret_cast<void *>(offsetof(VertexData, normal)));
    SDL_assert(glGetError() == GL_NO_ERROR);

    // Indices
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_arrayElements);
    SDL_assert(glGetError() == GL_NO_ERROR);
    {
      glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned), indices.data(), GL_STATIC_DRAW);
      SDL_assert(glGetError() == GL_NO_ERROR);
    }
  }
  glBindVertexArray(0);
  SDL_assert(glGetError() == GL_NO_ERROR);

This is where I draw the mesh in the mesh renderer:

    glBindVertexArray(m_mesh->m_vertexArray);
    SDL_assert(glGetError() == GL_NO_ERROR);
    {
      glDrawElements(GL_TRIANGLES, static_cast<GLsizei>(m_mesh->m_elementsCount), GL_UNSIGNED_INT, nullptr);
      SDL_assert(glGetError() == GL_NO_ERROR);
    }
    glBindVertexArray(0);
    SDL_assert(glGetError() == GL_NO_ERROR);

Depth testing is enabled, and the shader is fine (I haven’t modified it since switching to glDrawElements).
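
For reference, the depth setup is just the standard calls, sketched here from memory rather than copied verbatim from my renderer:

    // Once, at context creation
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LESS);

    // Every frame, before drawing: clear depth together with color,
    // otherwise stale depth values reject fragments seemingly at random
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);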

OK… I found the issue: in the glVertexAttribPointer calls, sizeof(verticesData) should’ve been sizeof(VertexData), i.e. the struct type, not the std::vector object, so the stride was wrong:

    // Configure the buffer layout for the vertices
    glEnableVertexAttribArray(0);
    SDL_assert(glGetError() == GL_NO_ERROR);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), reinterpret_cast<void *>(offsetof(VertexData, vertex)));
    SDL_assert(glGetError() == GL_NO_ERROR);

    // Configure the buffer layout for the normals
    glEnableVertexAttribArray(1);
    SDL_assert(glGetError() == GL_NO_ERROR);
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), reinterpret_cast<void *>(offsetof(VertexData, normal)));
    SDL_assert(glGetError() == GL_NO_ERROR);
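
For anyone hitting the same thing, here’s why it compiled fine but used the wrong stride (the exact byte counts depend on the implementation and build):

    std::vector<VertexData> verticesData(8);

    sizeof(verticesData);    // size of the std::vector object itself (its
                             // internal pointers), e.g. 24 or 32 bytes --
                             // unrelated to the element layout
    sizeof(VertexData);      // size of one interleaved vertex:
                             // 6 * sizeof(GLfloat) = 24 bytes, the stride
                             // OpenGL actually needs
    sizeof(verticesData[0]); // equivalent alternative that also works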

Oh man, I’m going to bed :doh: