Problems drawing my texture

Hi,

Long-time lurker making my first post. First off, thanks for all the good posts here.

I have a problem drawing my texture to the screen.
Here is my code, starting with the class that loads and draws the image.

#include "Texture.h"

Texture::Texture() {
	//Something

}


Texture::Texture(std::string filename) {
	h = Hilfer();
	std::cout << filename << std::endl;
	ILuint ImgId = 0;
	ilGenImages(1, &ImgId);
	ilBindImage(ImgId);
	// Lame conversion to wchar_t*
	std::wstring widestring = std::wstring(filename.begin(), filename.end());
	const wchar_t* widecstr = widestring.c_str();

	ilLoadImage(widecstr);
	ILubyte *Data = ilGetData();
	//ilDeleteImages(1, &ImgId);

	glGenTextures(1, &texture);
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, texture);

	// Set filter
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

	// Set image

	glTexImage2D(
		GL_TEXTURE_2D,
		GLint(0),
		ilGetInteger(IL_IMAGE_BYTES_PER_PIXEL),
		GLsizei(ilGetInteger(IL_IMAGE_WIDTH)),
		GLsizei(ilGetInteger(IL_IMAGE_HEIGHT)),
		0,
		ilGetInteger(IL_IMAGE_FORMAT),
		ilGetInteger(IL_IMAGE_TYPE),
		Data);

	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, texture);

	// Initialize stuff here
	float vertexData[] = {
		 0.5f,  0.5f, 0.0f, 1.0f, // Screen coordinates
		 0.5f, -0.5f, 0.0f, 1.0f,
		-0.5f, -0.5f, 0.0f, 1.0f,
		-0.5f,  0.5f, 0.0f, 1.0f,
		 1.0f,  1.0f, 0.0f, 1.0f, // Texture coordinates
		 1.0f,  0.0f, 0.0f, 1.0f,
		 0.0f,  0.0f, 1.0f, 1.0f,
		 0.0f,  1.0f, 0.0f, 1.0f,
	};

	shaderList.push_back(shader.LoadShader(GL_VERTEX_SHADER, "texture.vert"));
	shaderList.push_back(shader.LoadShader(GL_FRAGMENT_SHADER, "texture.frag"));

	theProgram = shader.CreateProgram(shaderList);

	glGenBuffers(1, &vertexBufferObject);

	glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
	glBufferData(GL_ARRAY_BUFFER, sizeof(vertexData), vertexData, GL_STREAM_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glGenVertexArrays(1, &vao);
	glBindVertexArray(vao);
	texture_shader = glGetUniformLocation(theProgram, "texture");
	
}

void Texture::Load(std::string filename) {

}

void Texture::Draw() {

	// something something
	Refresh();

}

void Texture::Refresh() {


	glUseProgram(theProgram);
	glUniform1i(texture_shader, 0);
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, texture);


	glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
	glEnableVertexAttribArray(0);
	glEnableVertexAttribArray(1);
	glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0);
	glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (void*)64);

	glDrawArrays(GL_QUADS, 0, 4);

	glDisableVertexAttribArray(0);
	glDisableVertexAttribArray(1);
	glUseProgram(0);
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, 0);
	h.checkGLError();


}


Texture::~Texture() {

}

void Texture::checkError() {
	ILenum Error;
	if((Error = ilGetError()) != IL_NO_ERROR) { 
		std::cout << Error << " : " << iluErrorString(Error) << std::endl;
	}
}

My fragment shader:

#version 330

uniform sampler2D tex;
in vec4 verttexcoord;
out vec4 outputColor;
void main()
{
	vec2 texpos = vec2(verttexcoord.x, verttexcoord.y);
	outputColor = texture(tex, texpos);
}

and my vertex shader:

#version 330

layout (location = 0) in vec4 position;
layout (location = 1) in vec4 texcoord;

out vec4 verttexcoord;
 
void main() {

	verttexcoord = texcoord;
	gl_Position = position;
}



The code compiles and runs fine - by that I mean the shaders compile without errors and glGetError() doesn't report any trouble. However, instead of a quad with my texture on it, I get a brown-ish quad. I have tried to fix it, but I'm drawing a blank - I just can't find the problem. Can you see what it is?
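One thing I realize I haven't done in the code above is actually check whether DevIL loads the image successfully - checkError() is defined but never called. Something like this right after ilLoadImage is what I had in mind (just a sketch; ilLoadImage returns an ILboolean):

	ILboolean ok = ilLoadImage(widecstr);
	if (!ok) {
		// If the load failed, ilGetData() would hand back garbage (or null)
		ILenum err = ilGetError();
		std::cout << "ilLoadImage failed: " << err << " : " << iluErrorString(err) << std::endl;
	}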

I have also uploaded Texture.cpp here, in case the inline code above is annoying to read: http://codepad.org/aqiXQdXW

Thanks

The third parameter to glTexImage2D is the internal format, which should be one of GL_RGB, GL_RGBA, GL_LUMINANCE, GL_ALPHA or GL_LUMINANCE_ALPHA. I think you can pass IL_IMAGE_FORMAT for this parameter.


glTexImage2D(
GL_TEXTURE_2D, 
0, 
ilGetInteger(IL_IMAGE_FORMAT),
GLsizei(ilGetInteger(IL_IMAGE_WIDTH)), 
GLsizei(ilGetInteger(IL_IMAGE_HEIGHT)), 
0,  
ilGetInteger(IL_IMAGE_FORMAT), 
GL_UNSIGNED_BYTE, // change this to the type of your Data variable; I assume it is unsigned byte
Data);
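
Alternatively, if I remember DevIL correctly, you can convert the image to a known format right after loading it, so you always know exactly what you are passing to glTexImage2D (sketch, using DevIL's ilConvertImage):

ilConvertImage(IL_RGBA, IL_UNSIGNED_BYTE); // force the pixel data into RGBA, unsigned byte
ILubyte *Data = ilGetData();               // now GL_RGBA / GL_UNSIGNED_BYTE are the right values to upload with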

Another thing, not related to the problem but good for the performance of your shader:

  1. If you do not have multiple textures, leave the current texture bound. Don't unbind and rebind the texture every frame (see the sketch after the swizzle example below).
  2. Use a swizzle to obtain the coordinates instead of creating a new local variable - or better, don't use a local variable at all:

outputColor = texture(tex, verttexcoord.xy);
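
For the first point, this is roughly what I mean (just a sketch, assuming this texture is the only one you ever draw with - bind it to GL_TEXTURE0 once in the constructor and let Refresh() rely on it staying bound):

void Texture::Refresh() {
	glUseProgram(theProgram);
	glUniform1i(texture_shader, 0);
	// the texture was bound to GL_TEXTURE0 once at load time, so no glBindTexture here

	glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
	glEnableVertexAttribArray(0);
	glEnableVertexAttribArray(1);
	glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0);
	glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (void*)64);

	glDrawArrays(GL_QUADS, 0, 4);

	glDisableVertexAttribArray(0);
	glDisableVertexAttribArray(1);
	glUseProgram(0);
	h.checkGLError();
}

That way the per-frame path only touches the program and the buffer, not the texture unit.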

See if this helps.