Hi,
OpenGL 4.1 new viewport features raised my interest for layered rendering.
I started my experiment with nVidia OpenGL 4.1 drivers but gl_ViewportIndex isn’t supported yet so I went back to OpenGL 4.0 implementation but still had issues and went back to OpenGL 3.3 capabilities but I still have some issues…
On nVidia I have an invalid operation error at draw call which is discard. On AMD the color attachment 2 and 3 takes the values of color attachment 1 using the OpenGL 3.3 implementation.
I suspect an (some) error in my experiment:
Here is my framebuffer object setup:
glGenFramebuffers(1, &FramebufferName);
glBindFramebuffer(GL_FRAMEBUFFER, FramebufferName);
glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, TextureColorbufferName, 0, 0);
glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT1, TextureColorbufferName, 0, 1);
glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT2, TextureColorbufferName, 0, 2);
glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT3, TextureColorbufferName, 0, 3);
GLenum DrawBuffers[4]= {GL_COLOR_ATTACHMENT0, GL_COLOR_ATTACHMENT1, GL_COLOR_ATTACHMENT2, GL_COLOR_ATTACHMENT3};
glDrawBuffers(4, DrawBuffers);
TextureColorbufferName is a texture array 2d, nothing specific I guess:
glGenTextures(1, &TextureColorbufferName);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D_ARRAY, TextureColorbufferName);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_BASE_LEVEL, 0);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAX_LEVEL, 1000);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_SWIZZLE_R, GL_RED);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_SWIZZLE_G, GL_GREEN);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_SWIZZLE_B, GL_BLUE);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_SWIZZLE_A, GL_ALPHA);
glTexImage3D(
GL_TEXTURE_2D_ARRAY,
0,
GL_RGB,
GLsizei(FRAMEBUFFER_SIZE.x),
GLsizei(FRAMEBUFFER_SIZE.y),
GLsizei(4), //depth
0,
GL_RGB,
GL_UNSIGNED_BYTE,
NULL);
And here is my test shader. It is supposed to fill each layer with a color.
Vertex shader:
#version 330 core
precision highp int;
// Declare all the semantics
#define ATTR_POSITION 0
#define ATTR_COLOR 3
#define ATTR_TEXCOORD 4
#define FRAG_COLOR 0
layout(location = ATTR_POSITION) in vec2 Position;
void main()
{
gl_Position = vec4(Position, 0.0, 1.0);
}
Geometry shader:
#version 330 core
precision highp int;
// Declare all the semantics
#define ATTR_POSITION 0
#define ATTR_COLOR 3
#define ATTR_TEXCOORD 4
#define FRAG_COLOR 0
layout(triangles) in;
flat out int GeomInstance;
uniform mat4 MVP;
void main()
{
for(int Layer = 0; Layer < 4; ++Layer)
{
gl_Layer = Layer;
for(int i = 0; i < gl_in.length(); ++i)
{
gl_Position = MVP * gl_in[i].gl_Position;
GeomInstance = Layer;
EmitVertex();
}
EndPrimitive();
}
}
Fragment shader:
#version 330 core
precision highp int;
// Declare all the semantics
#define ATTR_POSITION 0
#define ATTR_COLOR 3
#define ATTR_TEXCOORD 4
#define FRAG_COLOR 0
const vec4 Color[4] = vec4[]
(
vec4(1.0, 0.0, 0.0, 1.0),
vec4(1.0, 1.0, 0.0, 1.0),
vec4(0.0, 1.0, 0.0, 1.0),
vec4(0.0, 0.0, 1.0, 1.0)
);
flat in int GeomInstance;
layout(location = FRAG_COLOR, index = 0) out vec4 FragColor;
void main()
{
FragColor = Color[GeomInstance];
}
Any idea on what could be wrong?
Thanks!