hello
I've got a strange problem: it seems that OpenGL always converts my 32-bit textures to 16-bit, but I know for certain that I've initialized a 32-bit graphics mode (or 24-bit, who cares).
I use the following code to generate and upload the texture:
/* 256x256 RGBA8 test pattern: red ramps with x, blue ramps with y,
 * green is 0, alpha is fully opaque. 4 bytes per texel. */
unsigned char texture[256][256][4];
unsigned char *p = &texture[0][0][0];
for (int row = 0; row < 256; row++) {
    for (int col = 0; col < 256; col++) {
        *p++ = (unsigned char)col;  /* R = x */
        *p++ = 0;                   /* G     */
        *p++ = (unsigned char)row;  /* B = y */
        *p++ = 255;                 /* A     */
    }
}
/* Texture environment and sampling state: modulate with vertex color,
 * no filtering (nearest), tile in both directions. */
glTexEnvi(GL_TEXTURE_ENV,GL_TEXTURE_ENV_MODE,GL_MODULATE);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_REPEAT);
/* BUG FIX: the legacy internalformat "4" only asks for "four components"
 * and leaves the precision up to the driver, which is free to store the
 * texture at 16 bits (e.g. RGBA4) regardless of the framebuffer depth.
 * Request the sized format GL_RGBA8 to ask for 8 bits per channel.
 * (The driver may still quantize if the hardware lacks RGBA8, but it
 * will no longer downconvert by default.) */
glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA8,256,256,0,GL_RGBA,GL_UNSIGNED_BYTE,texture);