FrameBuffer Complete ??

Hi,

Can anyone explain what I need to do to ensure that my FBO is framebuffer complete?
My FBO code works fine on NVidia cards but fails on ATI cards (correct shadow mapping on NVidia, all black on ATI), so my guess is that I'm not framebuffer complete, because I get a 1286 error.
According to the FBO spec, 1286 is 0x0506 in hex:

INVALID_FRAMEBUFFER_OPERATION_EXT 0x0506

I have been reading the specs, but I still don't know if I'm framebuffer complete or not.
Can you guys take a look at my source and see if there's some error here that NVidia might silently ignore but that causes problems on ATI?

This is my FBO setup code:

GLuint g_BIG_SHADOWframeBuffer;
GLuint g_BIG_SHADOWdepthRenderBuffer;


// create the FBO and a 2048x2048 16-bit depth renderbuffer
glGenFramebuffersEXT( 1, &g_BIG_SHADOWframeBuffer );
glGenRenderbuffersEXT( 1, &g_BIG_SHADOWdepthRenderBuffer );
glBindRenderbufferEXT( GL_RENDERBUFFER_EXT, g_BIG_SHADOWdepthRenderBuffer );
glRenderbufferStorageEXT( GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT16, 2048, 2048 );

// set up the shadow map textures

 
// Big_Shadow: depth texture used for the far-away shadow pass
glGenTextures(1, &Big_Shadow);
glBindTexture(GL_TEXTURE_2D, Big_Shadow);
if(shadow_quality==1)
{
   glTexImage2D( GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT16, 1024, 1024, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
}
else
{
   glTexImage2D( GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT16, 2048, 2048, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
}
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER_ARB );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER_ARB );


GLfloat borderColor[4] = { 1.0f, 1.0f, 1.0f, 1.0f };  // assumed values; borderColor is declared elsewhere in the original
glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, borderColor);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE_ARB, GL_COMPARE_R_TO_TEXTURE_ARB);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_FUNC_ARB, GL_LEQUAL);
//
//
//
// Local_Shadow: depth texture used for the close-to-the-player shadow pass
glGenTextures(1, &Local_Shadow);
glBindTexture(GL_TEXTURE_2D, Local_Shadow);
glTexImage2D( GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT16, 2048, 2048, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
//
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER_ARB );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER_ARB );
glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, borderColor);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE_ARB, GL_COMPARE_R_TO_TEXTURE_ARB);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_FUNC_ARB, GL_LEQUAL);

And this is the render code:

 
glBindFramebufferEXT( GL_FRAMEBUFFER_EXT, g_BIG_SHADOWframeBuffer );
if(shadow_quality == 1)
{
   glViewport( 0, 0, 1024, 1024 );
}
else
{
   glViewport( 0, 0, 2048, 2048 );
}
//
// render meshes far away
glFramebufferTexture2DEXT( GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_TEXTURE_2D, Big_Shadow, 0 );
glFramebufferTexture2DEXT( GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, 0, 0 );  // detach any color texture
glDrawBuffer( GL_NONE );  // depth-only pass: no color writes
glReadBuffer( GL_NONE );
//
//setup texture projections
//render meshes that cast shadows
//
//
glViewport( 0, 0, 2048, 2048 );
//render meshes close to the player
glBindFramebufferEXT( GL_FRAMEBUFFER_EXT, g_BIG_SHADOWframeBuffer );
glFramebufferTexture2DEXT( GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_TEXTURE_2D, Local_Shadow, 0 );
glFramebufferTexture2DEXT( GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, 0, 0 );  // detach any color texture
glDrawBuffer( GL_NONE );
glReadBuffer( GL_NONE );
//
//
glBindFramebufferEXT( GL_FRAMEBUFFER_EXT, 0 );  // back to the window-system framebuffer
glViewport( 0, 0, window->init.width, window->init.height );

thanks,
Bruno

> I have been reading the specs, but I still don't know if I'm framebuffer complete or not.
Um, there’s a function in the FBO spec that you can call to ask if a framebuffer is complete. You should use that.
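That function is glCheckFramebufferStatusEXT. A minimal sketch of using it, assuming it runs right after your attachment calls while the FBO is still bound:

// returns one of the completeness codes from the EXT_framebuffer_object spec
GLenum status = glCheckFramebufferStatusEXT( GL_FRAMEBUFFER_EXT );
switch (status)
{
case GL_FRAMEBUFFER_COMPLETE_EXT:
    break;  // safe to render
case GL_FRAMEBUFFER_UNSUPPORTED_EXT:
    // the format combination is legal, but this implementation won't render to it
    printf("FBO unsupported - try different internal formats\n");
    break;
default:
    // one of the GL_FRAMEBUFFER_INCOMPLETE_* codes
    printf("FBO incomplete, status = 0x%x\n", status);
    break;
}

If you get GL_FRAMEBUFFER_UNSUPPORTED_EXT on ATI but GL_FRAMEBUFFER_COMPLETE_EXT on NVidia, that is exactly the implementation-defined case described below.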

Also, be aware that there can be unknowable, implementation-defined reasons for framebuffer incompleteness (that is what GL_FRAMEBUFFER_UNSUPPORTED_EXT reports). What is complete on one implementation can fail on another, possibly due to different texture formats, etc.

In your case, I’m guessing the use of the depth texture as a render target is something that ATi implementations do not support.

I guess that either your ATI card does not support rendering to 16-bit depth textures, or it has problems with texture borders. Why don't you use GL_CLAMP_TO_EDGE?
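For example, a sketch of both suggestions (GL_CLAMP_TO_EDGE is core since OpenGL 1.2; the 24-bit depth format is just an assumption worth testing, not a confirmed fix):

// swap the border-dependent wrap mode for edge clamping
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

// and, if 16-bit depth textures turn out to be the problem, try 24 bits
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT24, 2048, 2048, 0,
             GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, NULL);

With GL_CLAMP_TO_EDGE the border color is never sampled, so you can also drop the GL_TEXTURE_BORDER_COLOR calls.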

You can use:

GLint retsiz = 0;
glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT, GL_RENDERBUFFER_DEPTH_SIZE_EXT, &retsiz);
if (retsiz == 16)  // compare against the depth size you requested
{
    // succeeded
}
else
{
    // failed
}

On some drivers this may allocate a depth buffer with a different precision than the one you asked for; e.g. if you ask for 32 bits you may get 24 or 16.

You may also use:

glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT, GL_RENDERBUFFER_WIDTH_EXT, &retsiz);

to see if the desired size has been allocated (and ultimately whether the call succeeded).
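Putting the two queries together, a minimal sketch, assuming the renderbuffer is still bound and that 16 depth bits at 2048x2048 were requested as in the setup code above:

GLint depthBits = 0, rbWidth = 0;
glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT, GL_RENDERBUFFER_DEPTH_SIZE_EXT, &depthBits);
glGetRenderbufferParameterivEXT(GL_RENDERBUFFER_EXT, GL_RENDERBUFFER_WIDTH_EXT, &rbWidth);
if (depthBits != 16 || rbWidth != 2048)
    printf("driver substituted: %d depth bits, width %d\n", depthBits, rbWidth);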