Frag Y coord starts at -600 rather than 0 after adding frame buffer?

After adding a framebuffer and then drawing to it, the default framebuffer's fragment coordinate y becomes -600 at the bottom of the window and 0 at the top, rather than 0 at the bottom and 600 at the top.

I’ve messed around with the window size, the framebuffer size, the viewport, and the scissor, but nothing has stopped the frag coordinate from being negative.
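
For the viewport and scissor, that mostly meant resetting them after switching back to the default framebuffer, roughly like this (Width and Height are the window size):

// back on the default framebuffer, restore the full-window viewport and scissor
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0, 0, Width, Height);
glScissor(0, 0, Width, Height);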

The moment I draw to the frame buffer, the frag coordinate changes.

To test the frag coord, I check if the y coord is over -300, and brighten the colour if so. Since it goes to -600, half the screen lights up.

Two things that solve it are using a separate shader program, or binding a third framebuffer and clearing GL_COLOR_BUFFER_BIT.
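
The second workaround looks roughly like this (tempFBO here is just another framebuffer created the same way as FBO, and nothing is ever drawn into it):

// binding and clearing an unrelated framebuffer before the second pass
glBindFramebuffer(GL_FRAMEBUFFER, tempFBO);
glClear(GL_COLOR_BUFFER_BIT);
glBindFramebuffer(GL_FRAMEBUFFER, 0);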

I can’t find anything online that explains why this could happen though.

Here's the framebuffer init:

float vertices[] = {
    // pos      // tex
    -1.0f,  1.0f,  0.0f, 1.0f,
    -1.0f, -1.0f,  0.0f, 0.0f,
     1.0f, -1.0f,  1.0f, 0.0f,

    -1.0f,  1.0f,  0.0f, 1.0f,
     1.0f, -1.0f,  1.0f, 0.0f,
     1.0f,  1.0f,  1.0f, 1.0f
};

unsigned int FBO;
unsigned int VBO;
unsigned int quadVAO;
unsigned int texture;

glGenVertexArrays(1, &quadVAO);

glGenBuffers(1, &VBO);
glBindVertexArray(quadVAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);

// load framebuffer
glGenFramebuffers(1, &FBO);
glBindFramebuffer(GL_FRAMEBUFFER, FBO);

glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, Width, Height, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);

glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, 0);

glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
// renderbuffer object for depth and stencil
unsigned int RBO;
glGenRenderbuffers(1, &RBO);
glBindRenderbuffer(GL_RENDERBUFFER, RBO);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, Width, Height);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, RBO);
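
For reference, a completeness check straight after the attachment (not in my original code) would be something like:

// make sure the FBO is usable before rendering to it (needs <iostream>)
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    std::cout << "Framebuffer is not complete!" << std::endl;

glBindFramebuffer(GL_FRAMEBUFFER, 0);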

The render code:

    // frame buffer
    glBindFramebuffer(GL_FRAMEBUFFER, FBO);
    glClearColor(1.0f, 0.16f, 0.32f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    // draw square
    glUseProgram(shaderProgram);
    glUniform4f(glGetUniformLocation(shaderProgram, "colour"), 1.0,
      1.0, 0.3, 1.0);
    glBindVertexArray(VAO);
    glDrawArrays(GL_TRIANGLES, 0, 6);

    // back to default frame buffer
    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    // draw square
    glUseProgram(shaderProgram);
    glUniform4f(glGetUniformLocation(shaderProgram, "colour"), 1.0,
      0.0, 0.0, 1.0);
    glBindVertexArray(VAO);
    glDrawArrays(GL_TRIANGLES, 0, 6);
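
For reference, the way I’d eventually put the FBO’s texture on screen with the quad from the init code looks like this (screenShader is a separate program that just samples a sampler2D; it isn’t part of the snippets above):

    // draw the offscreen texture onto the screen quad
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glUseProgram(screenShader);
    glBindVertexArray(quadVAO);
    glBindTexture(GL_TEXTURE_2D, texture);
    glDrawArrays(GL_TRIANGLES, 0, 6);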

The fragment shader, which brightens the colour when gl_FragCoord.y is above -300 (showing that y goes down to -600):

#version 330 core

out vec4 FragColour;
uniform vec4 colour;

vec4 light = vec4(0.1f, 0.1f, 0.1f, 0.1f);
void main()
{
   if (gl_FragCoord.y > -300)
   {
     light = vec4(1.0f, 1.0f, 1.0f, 1.0f);
   }
   FragColour = colour * light;
}
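
For reference, a stripped-down shader that just visualises gl_FragCoord.y as a grey ramp (with a 600 px tall window it should go from black at the bottom to white at the top; negative values simply clamp to black):

#version 330 core

out vec4 FragColour;

void main()
{
   // grey ramp over the window height
   FragColour = vec4(vec3(gl_FragCoord.y / 600.0), 1.0);
}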

That looks like a driver bug. No component of gl_FragCoord should ever be negative. Although I suppose it’s possible that there’s a bug in your program which results in undefined behaviour rather than an error.
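
By default gl_FragCoord is in window coordinates with the origin in the lower-left corner, so for a 600-pixel-high window y should run from roughly 0.5 up to 599.5. You can change the origin convention by redeclaring gl_FragCoord in the fragment shader, but even then it never goes negative:

// optional redeclaration: origin at the top-left instead of the bottom-left
layout(origin_upper_left) in vec4 gl_FragCoord;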

You’re right, it’s a driver bug! I just ran the code on a different PC and the frag coordinates behave normally there. Thanks for your help.