How do I put a texture on my object?

I’m trying to put a texture on my object (which is a 3D cube) but I’m not sure how to do it because I don’t get the correct result.

This is where I initialize everything:

void OpenGLWindow::initGL()
{
    .
    .
    .
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    shader = loadShaderProgram("simple.vert", "simple.frag");
    glUseProgram(shader);

	// ambient
	glUniform3f(glGetUniformLocation(shader, "objectColor"), 1.0f, 0.5f, 0.31f);
	glUniform3f(glGetUniformLocation(shader, "lightColor"), 1.0f, 1.0f, 1.0f);
	glUniform3fv(glGetUniformLocation(shader, "lightPos"), 1, &lightPos[0]);
	glUniform3f(glGetUniformLocation(shader, "viewPos"), 0.0f, 0.0f, 3.0f);

	GLuint texture;
	glGenTextures(1, &texture);
	glBindTexture(GL_TEXTURE_2D, texture);

	int width, height, nrChannels;
	unsigned char *data = stbi_load("container.png", &width, &height, &nrChannels, 0);

	if (data) {
		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
		glGenerateMipmap(GL_TEXTURE_2D);
	}

	else {
		std::cout << "Failed to load texture" << std::endl;
	}

	stbi_image_free(data);

    // Set our viewing and projection matrices, since these do not change over time
    glm::mat4 projectionMat = glm::perspective(glm::radians(90.0f), 4.0f/3.0f, 0.1f, 10.0f);
    int projectionMatrixLoc = glGetUniformLocation(shader, "projectionMatrix");
    glUniformMatrix4fv(projectionMatrixLoc, 1, false, &projectionMat[0][0]);

    glm::vec3 eyeLoc(0.0f, 0.0f, 2.0f);
    glm::vec3 targetLoc(0.0f, 0.0f, 0.0f);
    glm::vec3 upDir(0.0f, 1.0f, 0.0f);
    glm::mat4 viewingMat = glm::lookAt(eyeLoc, targetLoc, upDir);
    int viewingMatrixLoc = glGetUniformLocation(shader, "viewingMatrix");
    glUniformMatrix4fv(viewingMatrixLoc, 1, false, &viewingMat[0][0]);

    // Load the model that we want to use and buffer the vertex attributes
    //geometry.loadFromOBJFile("sphere.obj");
    geometry.loadFromOBJFile("cube.obj");

	GLuint vertexbuffer;
	glGenBuffers(1, &vertexbuffer);
	glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
	glBufferData(GL_ARRAY_BUFFER, 3*geometry.vertexCount()*sizeof(float), geometry.vertexData(), GL_STATIC_DRAW);
	glEnableVertexAttribArray(0);
	glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
	glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);

	GLuint texturebuffer;
	glGenBuffers(1, &texturebuffer);
	glBindBuffer(GL_ARRAY_BUFFER, texturebuffer);
	glBufferData(GL_ARRAY_BUFFER, sizeof(geometry.textureCoordData()) * sizeof(glm::vec3), geometry.textureCoordData(), GL_STATIC_DRAW);
	glEnableVertexAttribArray(1);
	glBindBuffer(GL_ARRAY_BUFFER, texturebuffer);
	glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);

	GLuint normalbuffer;
	glGenBuffers(1, &normalbuffer);
	glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
	glBufferData(GL_ARRAY_BUFFER, sizeof(geometry.normalData()) * sizeof(glm::vec3), geometry.normalData(), GL_STATIC_DRAW);
	glEnableVertexAttribArray(2);
	glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
	glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);

    glPrintError("Setup complete", true);
}

I’m reading an .obj file which has the coordinates of the vertices, normals and textures already using a class which stores each coordinates in an appropriate vector;

for e.g. std::vector vertices; std::vector textureCoords; std::vector normals;

How can I send the values inside the texture’s vector to the shader?

This is my vertex shader:

#version 330 core
in vec3 position;

out vec3 FragPos;
out vec3 Normal;
out vec2 TexCoord;

uniform mat4 projectionMatrix;
uniform mat4 viewingMatrix;
uniform mat4 modelMatrix;

void main()
{
    vec4 transformedPosition = projectionMatrix * viewingMatrix * modelMatrix * vec4(position, 1.0f);
    gl_Position = transformedPosition;
    FragPos = vec3(modelMatrix * vec4(position, 1.0));
    Normal = mat3(transpose(inverse(modelMatrix))) * position;
}

Fragment shader:

out vec4 outColor;
in vec3 Normal;
in vec3 FragPos;

uniform vec3 lightPos;
uniform vec3 objectColor;
uniform vec3 lightColor;
uniform vec3 viewPos;

void main()
{
    float ambientStrength = 0.06;
    vec3 ambient = ambientStrength * lightColor;

    vec3 norm = normalize(Normal);
    vec3 lightDir = normalize(lightPos - FragPos);
    float diff = max(dot(norm, lightDir), 0.0);
    vec3 diffuse = diff * lightColor;

    float specularStrength = 0.1;
    vec3 viewDir = normalize(viewPos - FragPos);
    vec3 reflectDir = reflect(-lightDir, norm);
    float spec = pow(max(dot(viewDir, reflectDir), 0.0), 32);
    vec3 specular = specularStrength * spec * lightColor;
    vec3 result = (ambient + diffuse + specular) * objectColor;
    outColor = vec4(result, 1.0);
}

Also, I’m using SDL for this.

Using [var]sizeof[/var] here is almost certainly wrong. [var]sizeof[/var] will evaluate to the size of the type, not the data. If the values are stored in a std::vector, you need to use the .size() method.

I tried using the .size() method, I get this error:

error: request for member ‘size’ in ‘((OpenGLWindow*)this)->OpenGLWindow::geometry.GeometryData::textureCoordData()’, which is of non-class type ‘void*’
glBufferData(GL_ARRAY_BUFFER, geometry.textureCoordData().size() * sizeof(glm::vec3), geometry.textureCoordData(), GL_STATIC_DRAW);

Well clearly textureCoordData() isn’t returning a vector (or a reference to one).

You should probably use 2*geometry.vertexCount()sizeof(float) for the texture coordinates and 3geometry.vertexCount()*sizeof(float) for the normals.

[QUOTE=GClements;1292326]Well clearly textureCoordData() isn’t returning a vector (or a reference to one).

You should probably use 2*geometry.vertexCount()sizeof(float) for the texture coordinates and 3geometry.vertexCount()*sizeof(float) for the normals.[/QUOTE]

Why do I get these weird lines on my object when I rotate or zoom in/out?

[ATTACH=CONFIG]1832[/ATTACH]

[ATTACH=CONFIG]1833[/ATTACH]

And the texture doesn’t look like my picture.

[ATTACH=CONFIG]1834[/ATTACH]

This is my vertex shader:

#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec2 aTexture;

out vec3 FragPos;
out vec3 Normal;
out vec2 TexCoord;

uniform mat4 projectionMatrix;
uniform mat4 viewingMatrix;
uniform mat4 modelMatrix;

void main()
{
    FragPos = vec3(modelMatrix * vec4(aPos, 1.0));
    Normal = mat3(transpose(inverse(modelMatrix))) * aPos;
    TexCoord = aTexture;

    vec4 transformedPosition = projectionMatrix * viewingMatrix * modelMatrix * vec4(aPos, 1.0f);
    gl_Position = transformedPosition;
}

Fragment shader:

#version 330 core
out vec4 outColor;
in vec3 Normal;
in vec3 FragPos;
in vec2 TexCoord;

uniform vec3 lightPos;
uniform vec3 objectColor;
uniform vec3 lightColor;
uniform vec3 viewPos;
uniform sampler2D u_texture;

void main()
{
    vec4 textureColor = texture(u_texture, TexCoord);
    float ambientStrength = 0.5;
    vec3 ambient = ambientStrength * lightColor;

    vec3 norm = normalize(Normal);
    vec3 lightDir = normalize(lightPos - FragPos);
    float diff = max(dot(norm, lightDir), 0.0);
    vec3 diffuse = diff * lightColor;

    float specularStrength = 0.5;
    vec3 viewDir = normalize(viewPos - FragPos);
    vec3 reflectDir = reflect(-lightDir, norm);
    float spec = pow(max(dot(viewDir, reflectDir), 0.0), 64);
    vec3 specular = specularStrength * spec * lightColor;
    vec3 result = (ambient + diffuse + specular) * textureColor.rgb;
    outColor = vec4(result, 1.0);
}

Can you please have a look?

Fixed it, now my object is loading the texture with the correct image without the weird colorful lines. I had to change

GL_RGB

to

GL_RGBA
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);

Using the vertex position as the normal is incorrect unless you’re rendering a sphere.

  1. You have weights of 0.5 for each of ambient and specular, and 1.0 (implicit) for diffuse. At the extreme, the total amount of light could be 2.0. The values written to the framebuffer will be clamped to 1.0, but this is only done at the very end, and it’s done individually for each component. Light intensities greater than 1.0 will result in the largest components being more likely to be clamped, which will result in the colours being washed out.

  2. The texture colour should normally only be applied to ambient and diffuse lighting, not specular lighting. Most shiny objects are shiny because the surface is shiny, transparent and colourless, reflecting all colours equally. If you look at the reflections from coloured plastics, gloss paint, etc, the reflections are white (or the colour of the light) regardless of the colour of the plastic, paint, etc. The main exception is coloured metals such as gold or copper, which colour the reflected light.

So you should have:


    vec3 result = (ambient + 0.5 * diffuse) * textureColor.rgb + specular;

Provided that the ambient and diffuse weights sum to less than 1.0, you’ll only get saturation within specular highlights (where saturation would be expected).

How would you suggest I add a second light source at a different position to the world?