SDL and OpenGL Texturing Problems.

Due to my current setup, I’m stuck using OpenGL 2.1/glsl 1.2. I’m trying to do things in as modern a way as I can. I’ve got a custom model loaded and displaying a texture, but the uv coordinates don’t seem to be working correctly. Some faces have a really high definition mapping and others have a drastically stretched mapping. I’m parsing the information in from an .obj file. I’ve debugged the values and they seem to be coming in fine.

Vertex shader:


#version 120

uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;

attribute vec3 position;
attribute vec3 normal;
attribute vec2 uv;

varying vec2 o_uv;
varying vec3 o_normal;

void main()
{
	o_uv = uv;
	o_normal = normal;

	gl_Position = projection * view * model * vec4(position, 1);
}

Fragment shader:


#version 120

uniform sampler2D texture;

varying vec2 o_uv;
varying vec3 o_normal;

void main()
{
	gl_FragColor = texture2D(texture, o_uv);
}


Here is where I set up the buffers:


	std::vector<float> vertices;
	std::vector<float> uvs;
	std::vector<float> normals;
	std::vector<int> indices;

	std::string mesh_name = "sphere_inset";

	load_mesh(
		"../../../dist/media/models/" + mesh_name + ".obj",
		vertices, uvs, normals, indices, vertex_count);

	glGenVertexArrays(1, &vao);
	glBindVertexArray(vao);

	glGenBuffers(1, &vert_id);
	glBindBuffer(GL_ARRAY_BUFFER, vert_id);

	glBufferData(
		GL_ARRAY_BUFFER,
		8 * vertex_count * sizeof(GLfloat),
		nullptr,
		GL_STATIC_DRAW);

	glBufferSubData(
		GL_ARRAY_BUFFER,
		0,
		3 * vertex_count * sizeof(GLfloat),
		&vertices[0]);
	glBufferSubData(
		GL_ARRAY_BUFFER,
		3 * vertex_count * sizeof(GLfloat),
		2 * vertex_count * sizeof(GLfloat),
		&uvs[0]);
	glBufferSubData(
		GL_ARRAY_BUFFER,
		5 * vertex_count * sizeof(GLfloat),
		3 * vertex_count * sizeof(GLfloat),
		&normals[0]);

	glGenBuffers(1, &index_id);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_id);

	glBufferData(
		GL_ELEMENT_ARRAY_BUFFER,
		index_count * sizeof(GLuint),
		&indices[0], GL_STATIC_DRAW);

	position_id = glGetAttribLocation(program, "position");
	glVertexAttribPointer(position_id, 3, GL_FLOAT, GL_FALSE, 0, 0);
	glEnableVertexAttribArray(position_id);

	normal_id = glGetAttribLocation(program, "normal");
	glVertexAttribPointer(normal_id, 3, GL_FLOAT, GL_FALSE, 0, 0);
	glEnableVertexAttribArray(normal_id);

And here is some of the code I use to load the texture:


	glEnable(GL_TEXTURE_2D);
	glGenTextures(1, &texture_id);
	glBindTexture(GL_TEXTURE_2D, texture_id);

	SDL_Surface* surface = IMG_Load(file_path.c_str());

	if (!surface)
	{
		SDL_Log(SDL_GetError());
		return 0;
	}

	int color_mode = GL_RGB;

	if (surface->format->BytesPerPixel == 4)
		color_mode = GL_RGBA;

	glTexImage2D(
		GL_TEXTURE_2D, 0, color_mode,
		surface->w, surface->h,
		0, color_mode, GL_UNSIGNED_BYTE,
		surface->pixels);

	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);

	tex_id = glGetUniformLocation(program, "texture");
	glActiveTexture(GL_TEXTURE0);
	glUniform1i(tex_id, 0);

	uv_id = glGetAttribLocation(program, "uv");
	glVertexAttribPointer(uv_id, 2, GL_FLOAT, GL_FALSE, 0, 0);
	glEnableVertexAttribArray(uv_id);

	SDL_FreeSurface(surface);

The model’s shape seems to display just fine, but the textures are all different. The texture I’m loading is a .png file.

You’re storing the different arrays (position, normal, texture coordinates) in different regions of the same buffer, but you’re passing an offset of 0 to glVertexAttribPointer() for all three attributes, meaning that the position data is also being used for the normals and texture coordinates. You should be passing the same offsets you passed to glBufferSubData(), i.e.


	glVertexAttribPointer(normal_id, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)(5 * vertex_count * sizeof(GLfloat)));
	glVertexAttribPointer(uv_id,     2, GL_FLOAT, GL_FALSE, 0, (const GLvoid*)(3 * vertex_count * sizeof(GLfloat)));

Ah, thanks! That fixed it up. I was suspicious of those offsets, but hadn’t put it together yet.