Mapping problem

Hello, I have tried repeatedly to map a BMP image onto a quad, but it only displays a white shape — could anybody tell me why?
Here is how I read the file and initialize the texture. The BMP file is called "NeHe.bmp" (it is a 256×256 image):
BOOL __fastcall TForm1::LoadGLTextures()
{
	// Loads "C:\NeHe.bmp" (256x256, 24-bit) into the OpenGL texture `texName`.
	// Returns TRUE on success, FALSE if the file could not be read.
	// NOTE(review): this must run AFTER the GL rendering context has been
	// created and made current; otherwise every gl* call below silently
	// fails and the quad renders untextured (solid white).

	// Fixed: the path literal used typographic quotes and an unescaped
	// backslash ("C:\NeHe.bmp"), which is not a valid C string.
	if ((datas = LoadBMP("C:\\NeHe.bmp")) == NULL)   // LoadBMP is defined later
		return FALSE;

	// Each 256-pixel row of 24-bit data is 768 bytes — already a multiple
	// of 4 — but state the unpack alignment explicitly anyway.
	glPixelStorei(GL_UNPACK_ALIGNMENT, 4);

	glGenTextures(1, &texName);
	glBindTexture(GL_TEXTURE_2D, texName);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

	// Use the symbolic internal format GL_RGB8 instead of the legacy "3".
	// BMP pixel data is stored bottom-up as BGR, so pass GL_BGR_EXT as the
	// source format; GL_RGB would swap the red and blue channels.
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, 256, 256, 0, GL_BGR_EXT,
	             GL_UNSIGNED_BYTE, datas);

	return TRUE;                                    // texture loaded
}

//-----------------------------------------

void* __fastcall TForm1::LoadBMP(char *Filename) // Loads a bitmap image
{
	// Reads the raw pixel data of an uncompressed BMP file.
	// @param Filename  path of the .bmp file to load
	// @return          malloc'd pixel data (caller must free), or NULL on error
	FILE *fp;
	BITMAPFILEHEADER header;
	long int infosize;
	long int datasize;
	BITMAPINFO *info;
	void *pixels;    // renamed from `datas` — it shadowed the member of the same name

	// Bug fix: open the file named by the parameter instead of a hard-coded path.
	if ((fp = fopen(Filename, "rb")) == NULL)
	{
		return NULL;
	}

	if (fread(&header, 1, sizeof(BITMAPFILEHEADER), fp) < sizeof(BITMAPFILEHEADER))
	{
		fclose(fp);
		return NULL;
	}

	// Everything between the file header and the pixel data is the info
	// header plus any palette; read it in one piece.
	infosize = header.bfOffBits - sizeof(BITMAPFILEHEADER);

	if ((info = (BITMAPINFO *) malloc(infosize)) == NULL)
	{
		fclose(fp);
		return NULL;
	}

	if ((long int) fread(info, 1, infosize, fp) < infosize)
	{
		fclose(fp);
		free(info);
		return NULL;
	}

	// Bug fix: biSizeImage may legally be 0 for uncompressed (BI_RGB)
	// bitmaps; compute the size from the dimensions in that case.
	// Rows are padded to a multiple of 4 bytes.
	datasize = info->bmiHeader.biSizeImage;
	if (datasize == 0)
	{
		datasize = ((info->bmiHeader.biWidth * info->bmiHeader.biBitCount + 31) / 32)
		           * 4 * labs(info->bmiHeader.biHeight);
	}

	if ((pixels = malloc(datasize)) == NULL)
	{
		fclose(fp);
		free(info);
		return NULL;
	}

	if ((long int) fread(pixels, 1, datasize, fp) < datasize)
	{
		fclose(fp);
		free(info);
		free(pixels);
		return NULL;
	}

	fclose(fp);
	free(info);   // bug fix: the info header was leaked on the success path

	return pixels;
}
//-----------------------------------------
//This is how I do the rendering :
//-----------------------------------------

void __fastcall TForm1::RenderGLScene()
{
	// Draws one textured quad, rotating it a little more each frame.
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glLoadIdentity();

	glEnable(GL_TEXTURE_2D);
	glBindTexture(GL_TEXTURE_2D, texName);

	// Move the quad into view, then apply the accumulated rotations
	// (X first, then Y, then Z — the order matters).
	glTranslatef(0.0f, 0.0f, -6.0f);
	glRotatef(xrot, 1.0, 0.0, 0.0);
	glRotatef(yrot, 0.0, 1.0, 0.0);
	glRotatef(zrot, 0.0, 0.0, 1.0);

	// One row per corner: { s, t,  x, y, z }, starting at the bottom-left
	// corner and walking around the quad.
	static const float corner[4][5] = {
		{ 0.0f, 0.0f,  -2.0f, -1.0f, 0.0f },
		{ 0.0f, 1.0f,  -2.0f,  1.0f, 0.0f },
		{ 1.0f, 1.0f,   0.0f,  1.0f, 0.0f },
		{ 1.0f, 0.0f,   0.0f, -1.0f, 0.0f },
	};

	glBegin(GL_QUADS);
	for (int i = 0; i < 4; ++i)
	{
		glTexCoord2f(corner[i][0], corner[i][1]);
		glVertex3f(corner[i][2], corner[i][3], corner[i][4]);
	}
	glEnd();

	// Advance the rotation angles for the next frame.
	xrot += 0.3f;
	yrot += 0.2f;
	zrot += 0.4f;
}

I just took a quick look at the code and didn’t see anything too obviously wrong. One thing you might want to try is using GL_RGB8 for the internal format parameter of glTexImage2D instead of 3. That parameter accepts 1, 2, 3, or 4 for backward compatibility, but it is generally preferable to use one of the symbolic constants.

One other thing that isn’t obvious from your code is when you are actually loading the texture. You MUST set up the window’s pixel format and create the OpenGL rendering context BEFORE you make any OpenGL calls. So if you are doing something like

LoadTexture();
InitializeWindow();

It’s not going to work because your OpenGL calls in LoadTexture will fail.