texture_buffer_object problem in geometry shader

I’m trying to read a buffer storing some vertex coordinates from the geometry shader. It is a really simple access in a really simple shader, only for testing, but it doesn’t work with an ATI Radeon 4850 and I can’t figure out why. When I call texelFetchBuffer() inside the GS I always get zero, no matter what index I use; but if I call the function with the same parameters inside the vertex shader, it works.

I have Windows XP and the latest ATI drivers. Can you tell me what I’m doing wrong, please?

(BTW, this same code works without problems on an NVIDIA card.)


// --- Load vertex data ---
glGenBuffers( NumBuffers, buffers );
glBindBuffer( GL_ARRAY_BUFFER, buffers[Vertices] );
glBufferData( GL_ARRAY_BUFFER, sizeof(vertexData),
			vertexData, GL_STATIC_DRAW );
			
glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, buffers[Indices] );
glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof(indices),
			indices, GL_STATIC_DRAW );

// --- Setup textures ---
glGenTextures( NumTextures, textures ); CHECK_GL_ERROR();
glBindTexture( GL_TEXTURE_BUFFER_EXT, textures[TexBuffer] ); CHECK_GL_ERROR();
glTexBufferEXT( GL_TEXTURE_BUFFER_EXT, GL_RGBA32F_ARB, buffers[Vertices] ); CHECK_GL_ERROR();
glBindTexture( GL_TEXTURE_BUFFER_EXT, 0 ); CHECK_GL_ERROR();


// --- Render ---
glUniform1f(elemDataSize, sizeof(VertexData)); // 32
glActiveTexture(GL_TEXTURE0); CHECK_GL_ERROR();
glBindTexture(GL_TEXTURE_BUFFER_EXT, textures[TexBuffer]); CHECK_GL_ERROR();
glUniform1i(tboSampler, 0); CHECK_GL_ERROR();
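
For completeness, the vertex shader that feeds this is basically a pass-through, roughly along these lines (simplified here; `vertexIdx` is just a stand-in name for the per-vertex index attribute I actually upload):

#version 120
#extension GL_EXT_gpu_shader4 : enable

uniform samplerBuffer tboSampler;   // same buffer texture, bound to unit 0

attribute float vertexIdx;          // stand-in name for the per-vertex index attribute

varying vec4  colorIn;              // read by the GS as colorIn[i]
varying float gIdx;                 // read by the GS as gIdx[i]

void main()
{
	gl_Position = gl_Vertex;        // the modelview-projection transform is applied in the GS
	colorIn     = gl_Color;
	gIdx        = vertexIdx;

	// The same fetch that returns zero in the GS works fine from here:
	// vec4 test = texelFetchBuffer( tboSampler, int(vertexIdx) );
}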

And the geometry shader is:

#version 120
#extension GL_EXT_geometry_shader4 : enable
#extension GL_EXT_gpu_shader4 : enable

uniform samplerBuffer tboSampler;

varying in vec4 colorIn[1];
varying in float gIdx[1];

varying out vec4 color;

void main() {
	for( int i = 0; i < gl_VerticesIn; i++ )
	{
		vec4 posIn = gl_PositionIn[ i ];
		vec4 pos = gl_ModelViewProjectionMatrix * posIn;
		gl_Position   = pos;
		gl_ClipVertex = pos;
		color = colorIn[ i ];
		EmitVertex();

		int intIdx = int( 2 * (gIdx[i] + 1) );
		posIn = vec4( texelFetchBuffer( tboSampler, intIdx ).xyz, 1.0 );
		pos = gl_ModelViewProjectionMatrix * posIn;

		gl_Position   = pos;
		gl_ClipVertex = pos;
		color = colorIn[ i ];
		EmitVertex();
	}
	EndPrimitive();
}

Does the input array size match the input primitive type? I would expect a type/size mismatch or an out-of-bounds error in that case. As it is, you’re looping over the input, so it seems to me you’re expecting something other than a point.
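
To illustrate what I mean, the link-time setup would have to look something like this (`program` here stands for your linked program object); the input type set here has to agree with the [1]-sized input arrays in your GS, so anything other than GL_POINTS would need larger arrays (e.g. GL_LINES needs colorIn[2]):

// Sketch of the EXT_geometry_shader4 link-time parameters; names/values are illustrative.
glProgramParameteriEXT( program, GL_GEOMETRY_INPUT_TYPE_EXT,  GL_POINTS );      // must match colorIn[1], gIdx[1]
glProgramParameteriEXT( program, GL_GEOMETRY_OUTPUT_TYPE_EXT, GL_LINE_STRIP );
glProgramParameteriEXT( program, GL_GEOMETRY_VERTICES_OUT_EXT, 2 );             // the GS emits two vertices per point
glLinkProgram( program );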

Thanks for the reply.
Yes, the input array size is OK. I know the input loop isn’t needed, and I’ve tried removing it, but it fails either way. I think it’s something related to the ATI OpenGL drivers, so for now I’m continuing my work on an NVIDIA card.