glTexSubImage

Hello,

I have a 256 x 256 x 256 texture, which I loaded into OpenGL using glTexImage3D:

glTexImage3DEXT(GL_TEXTURE_3D_EXT, 0, GL_LUMINANCE8, 256, 256, 256, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, img);

Now I want to be able to change the value of single pixels by subloading pixel values one by one using:

glTexSubImage3DEXT(GL_TEXTURE_3D_EXT, 0, x, y, z, 1, 1, 1, GL_LUMINANCE, GL_UNSIGNED_BYTE, &p);

Where p is a single unsigned byte.
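For reference, here is a stripped-down version of what I am trying to do, boiled down to one self-contained function (minimalRepro, the texture name and the coordinates are just placeholders for this sketch; my real code is the OverlayTexture class further down):

#include <GL/gl.h>
#include <GL/glext.h>

// Stripped-down sketch; assumes a current GL context and the EXT_texture3D extension.
void minimalRepro()
{
    // Placeholder volume data and texture name, only for this sketch.
    static unsigned char img[256 * 256 * 256];
    GLuint texName;

    glGenTextures(1, &texName);
    glBindTexture(GL_TEXTURE_3D_EXT, texName);

    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);   // volume bytes are tightly packed
    glTexParameteri(GL_TEXTURE_3D_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_3D_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

    // Upload the whole 256^3 luminance volume once.
    glTexImage3DEXT(GL_TEXTURE_3D_EXT, 0, GL_LUMINANCE8,
                    256, 256, 256, 0,
                    GL_LUMINANCE, GL_UNSIGNED_BYTE, img);

    // Later: overwrite a single texel at (x, y, z).
    unsigned char p = 255;
    int x = 10, y = 20, z = 30;   // arbitrary example coordinates
    glTexSubImage3DEXT(GL_TEXTURE_3D_EXT, 0,
                       x, y, z,   // offset of the texel
                       1, 1, 1,   // a 1 x 1 x 1 region, i.e. one texel
                       GL_LUMINANCE, GL_UNSIGNED_BYTE, &p);
}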

For some reason this only works for z == 0.
If z > 0 I get no result, and I can't work out why.

Can anyone help me?

Best regards
Soren Vorre

Some code:

----- sub loading code

void
OverlayTexture::setColor(int x, int y, int z, unsigned char c) {
    if(!hasChanged && !usePixelSubLoad) {
        hasChanged = true;
    }
    // Keep the system-memory copy of the volume in sync.
    buffer[z*size_x*size_y + y*size_x + x] = c;
    if(usePixelSubLoad && pixType != Texture::UNDEFINED_PIXTYPE)
    {
        if(tex != NULL)
        {
            printf("SubLoading %d to %d\n", c, tex->texNames[0]);
            glBindTexture(GL_TEXTURE_3D_EXT, tex->texNames[0]);
            glTexSubImage3DEXT(GL_TEXTURE_3D_EXT, 0,            //target, level
                               x, y, z,                         //offset
                               1, 1, 1,                         //dimensions
                               Palette::getPaletteTexType2(),   //pixel type
                               GL_UNSIGNED_BYTE,                //storage type
                               &c);                             //pixel
            int er = glGetError();
            printf("er == %d\n", er);
        }
    }
}

----- texture setup code

void
OverlayTexture::loadTexture(unsigned char *img, int w, int h, int d) {

    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexParameteri(GL_TEXTURE_3D_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_3D_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_3D_EXT, GL_TEXTURE_WRAP_R, GL_CLAMP);
    glTexParameteri(GL_TEXTURE_3D_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_3D_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    getGLError("OverlayTexture::loadTexture() at glTexParameteri()");

    if(texType == Texture::TEXTURE_3D) {
        glTexImage3DEXT(GL_TEXTURE_3D_EXT, 0, Palette::getPaletteTexType1(), h, w, d, 0,
                        Palette::getPaletteTexType2(), GL_UNSIGNED_BYTE, img);
    } else {
        glTexImage2D(GL_TEXTURE_2D, 0, Palette::getPaletteTexType1(), h, w, 0,
                     Palette::getPaletteTexType2(), GL_UNSIGNED_BYTE, img);
    }

    getGLError("OverlayTexture::loadTexture() at glTexImage3DEXT()");

    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
}

[This message has been edited by svorre (edited 01-05-2004).]

By experimenting with the size of the sub texture I found that the following code gives me the correct behavior for z < 128.

(correct behavior: a single visible pixel of the texture is updated)

void
OverlayTexture::setColor(int x, int y, int z, unsigned char c) {
    if(!hasChanged && !usePixelSubLoad) {
        hasChanged = true;
    }
    buffer[z*size_x*size_y + y*size_x + x] = c;
    if(usePixelSubLoad && pixType != Texture::UNDEFINED_PIXTYPE)
    {
        if(tex != NULL)
        {
            // Fill a scratch row with the new value and upload a
            // 1 x 1 x (z+1) block instead of a single texel.
            unsigned char *l = new unsigned char[256];
            memset(l, c, 256);
            glBindTexture(GL_TEXTURE_3D_EXT, tex->texNames[0]);
            glTexSubImage3DEXT(GL_TEXTURE_3D_EXT, 0,            //target, level
                               x, y, z,                         //offset
                               1, 1, z+1,                       //dimensions
                               Palette::getPaletteTexType2(),   //pixel type
                               GL_UNSIGNED_BYTE,                //storage type
                               l);                              //pixels
            int er = glGetError();
            printf("er == %d\n", er);
            delete[] l;                                         //free the scratch buffer
        }
    }
}

The plot thickens!

[This message has been edited by svorre (edited 01-05-2004).]

It turned out to be just a bug in the ATI Radeon driver.

I fixed it by installing the newest driver.