OpenGL does not read the colour correctly from my vertex array

With OpenGL shaders, I want to draw a triangle on the screen, where the vertex colours are specified in the data structure alongside the vertex coordinates. The structure has 7 floats for each vertex – 3 for coordinates, followed by 4 for colour:


    static std::vector<GLfloat> vertices = {
            -1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f,
            1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f,
            0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f
        };

I then tell OpenGL how to interpret this structure by using glVertexAttribPointer():


    // Vertex coordinates
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 7 * sizeof(float), 0);
    // Vertex colour
    glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 7 * sizeof(float), (void*)(3 * sizeof(float)));

And then tell my vertex shader to read the coordinates and colour:


    layout (location = 0) in vec3 position;
    layout (location = 1) in vec4 vertex_colour;

However, no matter what values I use for the colour component, the triangle is always drawn in red. Changing the coordinates in the structure affects the image as expected, but changing the colour in the structure does nothing.

I believe that this is a problem with my C++ code, rather than the shader code, because I have debugged the shader and it is always reading (1.0, 0.0, 0.0, 1.0) for the colour, even though I am passing it (0.0, 0.0, 1.0, 1.0) for each vertex.

Any ideas on what I’m doing wrong?

Thanks :slight_smile:

Here is my complete code:

    #include <iostream>
    #include <string>
    #include <fstream>
    #include <sstream>
    #include <cmath>
    #include <assert.h>
    #include <vector>
    
    #include <GL/glew.h>
    #include <GL/glut.h>
    #include <glm/glm.hpp>
    #include <glm/gtc/type_ptr.hpp>
    #include <glm/gtc/matrix_transform.hpp>
    #include <glm/ext.hpp>
    
    GLuint buffer;
    GLuint projection_matrix_location;
    GLuint view_matrix_location;
    glm::mat4 view_matrix;
    glm::mat4 projection_matrix;
    int num_vertices = 0;
    
    static void RenderScene()
    {
        // Clear the buffers.
        glClear(GL_COLOR_BUFFER_BIT);
        glClear(GL_DEPTH_BUFFER_BIT);
    
        // Set the matrices
        glUniformMatrix4fv(projection_matrix_location, 1, GL_FALSE, glm::value_ptr(projection_matrix));
        glUniformMatrix4fv(view_matrix_location, 1, GL_FALSE, glm::value_ptr(view_matrix));
    
        // Specify how to read the vertex buffer
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, buffer);
        // Vertex coordinates
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 7 * sizeof(float), 0);
        // Vertex colour
        glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 7 * sizeof(float), (void*)(3 * sizeof(float)));
    
        // Draw the vertex buffer
        glDrawArrays(GL_TRIANGLES, 0, num_vertices);
        glDisableVertexAttribArray(0);
    
        // Swap the buffers
        glutSwapBuffers();
    }
    
    static void MakeBuffers()
    {
        // Set the vertices
        static std::vector<GLfloat> vertices = {
            -1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f,
            1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f,
            0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f
        };
        num_vertices = vertices.size() / 7;
    
        // Fill the buffer with the vertices
        glGenBuffers(1, &buffer);
        glBindBuffer(GL_ARRAY_BUFFER, buffer);
        glBufferData(GL_ARRAY_BUFFER, num_vertices * 7 * sizeof(GLfloat), &vertices[0], GL_STATIC_DRAW);
    }
    
    
    static GLuint MakeShader(GLenum shader_type, std::string shader_filename)
    {
        // Load the source code
        std::ifstream file_in;
        file_in.open(&shader_filename[0]);
        std::stringstream file_stream;
        file_stream << file_in.rdbuf();
        std::string file_string = file_stream.str();
        const GLchar* ptr_file_string = &file_string[0];
        const GLchar** ptr_file_strings = &ptr_file_string;
        int string_lengths[] = {(int)file_string.length()};
    
        // Compile the shader
        GLuint shader = glCreateShader(shader_type);
        glShaderSource(shader, 1, ptr_file_strings, &string_lengths[0]);
        glCompileShader(shader);
    
        // Check
        GLint is_success;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &is_success);
        if (!is_success)
        {
            std::cerr << "Error" << std::endl;
            return -1;
        }
    
        return shader;
    }
    
    
    static void MakeShaderProgram()
    {
        // Make the shaders
        GLuint vertex_shader = MakeShader(GL_VERTEX_SHADER, "../src/vertex-shader.glsl");
        GLuint fragment_shader = MakeShader(GL_FRAGMENT_SHADER, "../src/fragment-shader.glsl");
    
        // Create the program
        GLuint program = glCreateProgram();
        glAttachShader(program, vertex_shader);
        glAttachShader(program, fragment_shader);
        glLinkProgram(program);
    
        // Check
        GLint is_success = 0;
        glGetProgramiv(program, GL_LINK_STATUS, &is_success);
        if (!is_success)
        {
            std::cout << "Error" << std::endl;
            return;
        }
        glValidateProgram(program);
        glGetProgramiv(program, GL_VALIDATE_STATUS, &is_success);
        if (!is_success)
        {
            std::cout << "Error" << std::endl;
            return;
        }
    
        // Use the program
        glUseProgram(program);
    
        // Get the location of the uniform variables
        view_matrix_location = glGetUniformLocation(program, "view_matrix");
        assert(view_matrix_location != 0xFFFFFFFF);
        projection_matrix_location = glGetUniformLocation(program, "projection_matrix");
        assert(projection_matrix_location != 0xFFFFFFFF);
    }
    
    
    int main(int argc, char** argv)
    {
        // Initialize GLUT
        glutInit(&argc, argv);
    
        // Configure some GLUT display options:
        glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA);
    
        // Specify the GLUT window parameters and create the window
        glutInitWindowSize(1000, 750);
        glutInitWindowPosition(500, 200);
        glutCreateWindow("Test");
    
        // Specify the display callback
        glutDisplayFunc(RenderScene);
    
        // Initialize GLEW, which must be done after GLUT is initialized.
        GLenum glew_result = glewInit();
        if (glew_result != GLEW_OK)
        {
            std::cout << "Error" << std::endl;
            return -1;
        }
    
        // Set the clear colour.
        glClearColor(0.5f, 0.5f, 0.5f, 0.0f);
    
        // Enable depth testing so that only the nearest fragment is written to the colour buffer (also needed to read the depth of each pixel using glReadPixels()).
        glEnable(GL_DEPTH_TEST);
    
        // Make the vertex and index buffers.
        MakeBuffers();
    
        // Make the shader program.
        MakeShaderProgram();
    
        // Create the view matrix.
        glm::vec3 eye(0.0f, 0.0f, -3.0f);
        glm::vec3 centre(0.0f, 0.0f, 0.0f);
        glm::vec3 up(0.0f, 1.0f, 0.0f);
        view_matrix = glm::lookAt(eye, centre, up);
    
        // Create the projection matrix.
        float fov_y = 45.0;
        float aspect_ratio = 1.5;
        float near_clip = 1;
        float far_clip = 1000;
        projection_matrix = glm::perspective(fov_y, aspect_ratio, near_clip, far_clip);
    
        // Start the GLUT internal loop.
        glutMainLoop();
    }

And here is my shader code:

    // Vertex shader

    #version 330

    layout (location = 0) in vec3 position;
    layout (location = 1) in vec4 vertex_colour;

    uniform mat4 projection_matrix;
    uniform mat4 view_matrix;

    out vec4 frag_colour;

    void main()
    {
        gl_Position = projection_matrix * view_matrix * vec4(position, 1.0f);
        frag_colour = vertex_colour;
    }


    // Fragment shader

    #version 330

    in vec4 frag_colour;

    void main()
    {
        gl_FragColor = frag_colour;
    }

You’re missing a call to “glEnableVertexAttribArray(1)” for the colour array. Also note that gl_FragColor is deprecated, so you’re relying on driver-dependent behaviour (i.e. whether or not your driver will still accept it) for this to work.
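
In the render function that would look something like this (a sketch based on the RenderScene() posted above):

    // Enable BOTH attribute arrays before drawing. Attribute 1 (the colour) is never
    // enabled in the original code, so the shader reads a constant value instead of the array.
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, buffer);
    // Vertex coordinates
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 7 * sizeof(float), 0);
    // Vertex colour
    glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 7 * sizeof(float), (void*)(3 * sizeof(float)));

    glDrawArrays(GL_TRIANGLES, 0, num_vertices);

    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);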

It’s only driver-dependent insofar as conformance is driver-dependent. A conforming driver will accept it.

The fragment shader uses “#version 330”. While gl_FragColor was listed as “deprecated” in that version, it remained available in both core and compatibility profiles up to GLSL version 4.10. In GLSL version 4.20 and later, it has been removed from the core profile; the shader must request a compatibility profile in order to use gl_FragColor.
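
For example, under GLSL 4.20 or later a fragment shader that still wants gl_FragColor has to opt in explicitly, something like this sketch:

    #version 420 compatibility
    // gl_FragColor is only legal here because the compatibility profile was requested

    in vec4 frag_colour;

    void main()
    {
        gl_FragColor = frag_colour;
    }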

[QUOTE=GClements;1278587]It’s only driver-dependent insofar as conformance is driver-dependent. A conforming driver will accept it.[/QUOTE]

Thanks for the correction. :slight_smile:

This is not quite correct. Quoting from GLSL 1.50:

The OpenGL API has a forward compatibility mode that will disallow use of deprecated features. If compiling in a mode where use of deprecated features is disallowed, their use causes compile time errors.

So, a conformant implementation will allow or disallow gl_FragColor depending on the context flags (specifically, CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT). In GL 3.x and 4.x Core Profile contexts on OSX, it’s always disallowed (along with every other deprecated GLSL construct).

In the absence of glutInitContextFlags(GLUT_FORWARD_COMPATIBLE), that shouldn’t be relevant. But maybe that’s overridden (forced on) for OSX?

Indeed, it is forced on in OSX Core Profiles.
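
(For completeness: with freeglut the context version, profile and flags are requested before the window is created, roughly like the sketch below; the original code above just uses the plain GLUT defaults.)

    // freeglut-specific sketch: request an explicit context before glutCreateWindow()
    glutInitContextVersion(3, 3);
    glutInitContextProfile(GLUT_CORE_PROFILE);
    glutInitContextFlags(GLUT_FORWARD_COMPATIBLE);  // deprecated constructs now fail to compile
    glutCreateWindow("Test");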

[QUOTE=arekkusu;1278591]So, a conformant implementation will allow or disallow gl_FragColor depending on the context flags (specifically, CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT).[/QUOTE]

This is kind of an odd thing. See, there’s “deprecated” and then there’s “compatibility profile”. “Deprecated” means “still available in core until we decide to get rid of it”, while “compatibility profile” means “you can only get this if your OpenGL context is a compatibility context and you compile the shader appropriately”.

Until GLSL 4.20, gl_FragColor was marked “deprecated”; in 4.20 and above, it was put into the “compatibility profile”. It really should never have been in the core profile (indeed, its removal in 4.2 was considered a mere bug-fix), but core GL versions 3.2 through 4.1 do explain how it works and how it interacts with other core profile features.

So a conformant implementation of GL 3.2 core profile will allow the use of gl_FragColor. But a conformant implementation of GL 4.2 core profile will not. Either way, if your intent was to use the core profile, you shouldn’t have been using it at all.
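
The core-profile fix is to declare your own fragment shader output instead of writing to gl_FragColor. Applied to the fragment shader above, it would look something like this (the output name final_colour is just an example):

    #version 330 core

    in vec4 frag_colour;
    out vec4 final_colour;  // user-declared output replaces gl_FragColor

    void main()
    {
        final_colour = frag_colour;
    }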