I wrote a small test application where I compute the precision of different pixel formats, and when I use RGB10_A2 it looks like I have only 8 bits of precision instead of 10 bits.
Here is my test application:
It works fine with the other formats I tested. I have a Radeon X1800 and the latest driver (Cat 6.7). Apart from a driver bug, do you have any idea why it’s not working?
Here I create the texture:
glGenTextures(1, &m_unTexSceneRGB10_A2);
glBindTexture(GL_TEXTURE_2D, m_unTexSceneRGB10_A2);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGB10_A2, OFFSCREEN_BUFFER_SIZE, OFFSCREEN_BUFFER_SIZE, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL );
Here I create the FBO:
glGenRenderbuffersEXT(1, &m_unDepthBufferScene);
glBindRenderbufferEXT( GL_RENDERBUFFER_EXT, m_unDepthBufferScene );
glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT24, OFFSCREEN_BUFFER_SIZE, OFFSCREEN_BUFFER_SIZE);
glGenFramebuffersEXT(1, &m_unFrameBufferSceneRGB10_A2);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, m_unFrameBufferSceneRGB10_A2);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, m_unTexSceneRGB10_A2, 0);
glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, m_unDepthBufferScene);