[SOLVED] OpenGL 3.3 wrong coordinate drawing / viewport

Hi folks,

I started learning OpenGL 3.3, and I defined my triangle coordinates as:

#include "ref.h"

/* Entry point: creates an SDL window with an OpenGL 3.3 context, uploads a
 * single triangle, and renders it every frame until the user quits.
 * WindowInit/GameInit/glc/glswap/handleFPS and the globals `window`/`game`
 * come from ref.h, which is not shown here. */
int main(void){
	WindowInit(&window, "Test", WINW, WINH, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);
	game = GameInit(60);

	/* One vertex array object, generated and left bound for the whole run. */
	VAO vaodef = VAOInit(1);
	VAOCreate(&vaodef);
	VAOBind(&vaodef);

	/* Three vertices (x, y, z) in normalized device coordinates:
	 * bottom-left, bottom-right, top-center. */
	const GLfloat triangle_data[] = {
		-1.0f, -1.0f, 0.0f,
		1.0f, -1.0f, 0.0f,
		0.0f, 1.0f, 0.0f,
	};

	Mesh triangle = MeshInit();
	/* NOTE(review): vsize() is defined elsewhere; whether it yields an
	 * element count or a byte count decides MeshLoad's buffer size -- this
	 * mismatch is the bug reported in the thread. */
	triangle.Load(&triangle, triangle_data, vsize(triangle_data), 1);
	triangle.index = 0; /* Vertex Attrib Index */

	Shader triangle_shader = ShaderInit();
	triangle_shader.Load(&triangle_shader, "TriangleVertexShader.glsl", "TriangleFragmentShader.glsl");

	while(game.running == true){
		game.frameStart = SDL_GetTicks();
		glc(0x00, 0x00, 0x00, 0x00); /* presumably clears to black -- macro from ref.h, confirm */

		/* NOTE(review): only one event is polled per frame, and SDL_QUIT is
		 * compared against GetInput() inside the KEYDOWN branch -- verify
		 * this mapping is intentional. */
		if(SDL_PollEvent(&game.ev) != 0){
			if(game.ev.type == SDL_KEYDOWN){
				switch(GetInput()){
				case SDL_QUIT:
					game.running = false;
					break;
				}
			}
		}

		triangle_shader.Use(&triangle_shader);
		triangle.Draw(&triangle);
		glUseProgram(0); /* unbind the shader program after drawing */

		glswap; /* presumably swaps the back buffer -- macro from ref.h, confirm */
		game.frameTime = SDL_GetTicks() - game.frameStart;
		handleFPS(); /* frame pacing helper from ref.h */
	}

	WindowDestroy(&window);
}

But output is this:

How can I fix this? The triangle's top vertex should reach the top edge of the window…

EDIT: I forgot to include the shaders…

Vertex Shader

#version 330 core

/* Pass-through vertex shader: promotes the attribute-0 position to a
 * clip-space vec4 with w = 1.0 and forwards it unchanged. */
layout(location = 0) in vec3 aPos;

void main(){
     vec4 clipPos = vec4(aPos, 1.0);
     gl_Position = clipPos;
}

Fragment Shader

#version 330 core

/* Writes a constant red color to the first draw buffer for every fragment. */
out vec3 color;

void main(){
     color = vec3(1.0, 0.0, 0.0);
}

Has the window been resized? You need to call glViewport if the window size changes (the viewport is set to the current dimensions of the window the first time a context is bound, but it won’t automatically be updated if the window size changes).

Other than that, I have no idea. The code you’ve shown doesn’t include any OpenGL calls except for glUseProgram(0). I’m assuming that they’re hidden inside the methods of the VAO, Mesh, Shader, etc classes, but we can only guess at what’s in there.

I have no problem sharing that, that library will be GPLv3 in the future anyway.

VAO

    /* Build a VAO handle record. No GL name is allocated here; that happens
     * later in VAOCreate. */
    VAO VAOInit(int num){
    	VAO v;

    	v.num = num;       /* count passed to glGenVertexArrays later */
    	v.vid = 0;         /* no GL vertex-array name yet */
    	v.Bind = &VAOBind; /* method-style binding hook */

    	return v;
    }

    /* Generate v->num vertex-array names, storing the result in v->vid.
     * NOTE(review): v->vid looks like a single GLuint (see VAOInit), so any
     * num > 1 would write past it -- confirm the struct layout before using
     * counts other than 1. */
    void VAOCreate(VAO *v){
    	glGenVertexArrays(v->num, &v->vid);
    }

    /* Make v's vertex array object current for subsequent GL state/draw calls. */
    void VAOBind(VAO *v){
    	glBindVertexArray(v->vid);
    }

Mesh

    /* Upload `numVertices` vertices (3 GLfloats each, matching the layout
     * set in MeshDraw) into a freshly generated GL_ARRAY_BUFFER.
     * Always returns 0. */
    Uint8 MeshLoad(Mesh *mesh, const GLfloat *vertex, Uint8 numVertices, Uint8 num_buffers){
    	mesh->num_vertices = numVertices;
    	mesh->num_buffers = num_buffers;

    	glGenBuffers(num_buffers, &mesh->vertexArrayBuffer);
    	glBindBuffer(GL_ARRAY_BUFFER, mesh->vertexArrayBuffer);
    	/* BUG FIX: the original used sizeof(vertex), which is the size of a
    	 * *pointer* (arrays decay to pointers at function boundaries), not of
    	 * the caller's array. The byte size of the data is the vertex count
    	 * times 3 floats per vertex. */
    	glBufferData(GL_ARRAY_BUFFER, (GLsizeiptr)numVertices * 3 * sizeof(GLfloat), vertex, GL_STATIC_DRAW);

    	return 0;
    }

    /* Draw the mesh as GL_TRIANGLES: binds its buffer, points vertex
     * attribute mesh->index at tightly packed vec3 positions, and issues the
     * draw. Always returns 0. */
    Uint8 MeshDraw(Mesh *mesh){

    	glBindBuffer(GL_ARRAY_BUFFER, mesh->vertexArrayBuffer);

    	/* BUG FIX: the original enabled/disabled attribute 0 but set the
    	 * pointer on mesh->index -- if a caller ever sets index != 0, the
    	 * enabled attribute and the configured one diverge. Use mesh->index
    	 * consistently. */
    	glEnableVertexAttribArray(mesh->index);
    	glVertexAttribPointer(mesh->index, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
    	glDrawArrays(GL_TRIANGLES, 0, mesh->num_vertices);

    	glDisableVertexAttribArray(mesh->index);

    	return 0;
    }

Solved — the problem was the vsize() macro. I used sizeof(triangle_data) instead and everything works now. Thanks, everyone, for your time.