Can't get simple triangle to show up using indices

I started a new project and was happily coding along until I decided to test it, and found that it, well, doesn’t work. So I simplified my code as much as possible to isolate the error, to the point where I’m now trying to make a simple triangle show up, without success. (I’ve gotten rid of basically everything: my shaders are as basic as they can be, I don’t use any form of perspective projection or camera, etc.) I’ve been debugging with GLintercept and just can’t find the error (which, knowing my errors in previous projects, is probably something stupid…), and it’s driving me crazy, so I was hoping someone could maybe help me out!
Here’s my simplified main function:

// Initialize OpenGL, you can skip reading this
if (!glfwInit()){return 1;}
glfwSetErrorCallback(onGLError);
glfwWindowHint(GLFW_OPENGL_DEBUG_CONTEXT, GL_TRUE);
GLFWwindow* window = glfwCreateWindow(800, 800, "Test", NULL, NULL);
if (!window){glfwTerminate();return 2;}
glfwMakeContextCurrent(window);
gladLoadGL();
glEnable(GL_DEBUG_OUTPUT);
glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
glDebugMessageCallback(debugMessage, NULL);
glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0, NULL, GL_TRUE);

// Setting up my Vertex Array Object and tuning some settings
GLuint vao;
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
glEnable(GL_LINE_SMOOTH);
glEnable(GL_POLYGON_SMOOTH);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
glHint(GL_POLYGON_SMOOTH_HINT, GL_NICEST);
glPointSize(4);
glClearColor(0.9, 0.9, 0.9, 1);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);

// Can ignore, basically just the initialization of the shader. I'll post the shader constructor below, though.
copyShaderContents("bland.vert.glsl", "bland.frag.glsl", sv2, sf2);
Shader bland_prog(sv2, sf2);

// Here I'm finally trying to just make a goddamn simple triangle... 
std::vector<glm::vec4> tri{ glm::vec4(-.6, -.4, -.2, 1), glm::vec4(.6, -.4, 0, 1), glm::vec4(0, .6, 0, 1) };
std::vector<GLuint> l{ 0, 1, 2 };
GLuint bufIdx, bufPos;
glGenBuffers(1, &bufIdx);
glGenBuffers(1, &bufPos);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufIdx);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, l.size() * sizeof(GLuint), l.data(), GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, bufPos);
glBufferData(GL_ARRAY_BUFFER, tri.size() * sizeof(glm::vec4), tri.data(), GL_STATIC_DRAW);

// Main loop
while (!glfwWindowShouldClose(window)){
	int width, height;
	glfwGetFramebufferSize(window, &width, &height);
	glViewport(0, 0, width, height);
	glClear(GL_COLOR_BUFFER_BIT);

	// Here I'm trying to render the triangle… All this was originally in its own classes, but even this doesn't work
	glUseProgram(bland_prog.prog);
	glEnableVertexAttribArray(0);
	glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, NULL);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufIdx);
	glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, 0);

	glfwSwapBuffers(window);
	glfwPollEvents();
}
// Finish
glDeleteVertexArrays(1, &vao);
glfwDestroyWindow(window);
glfwTerminate();
return 0;

Here’s my shader initialization (shortened):

GLuint vert = glCreateShader(GL_VERTEX_SHADER);
GLuint frag = glCreateShader(GL_FRAGMENT_SHADER);
prog = glCreateProgram();
// I've checked by printing that the contents of vertSource and fragSource are indeed there.
glShaderSource(vert, 1, &vertSource, 0);
glShaderSource(frag, 1, &fragSource, 0); 
glCompileShader(vert);
glCompileShader(frag);
int success;
char infoLog[512];
glGetShaderiv(vert, GL_COMPILE_STATUS, &success);
if (!success) {
	glGetShaderInfoLog(vert, 512, NULL, infoLog);
	std::cout << "Vertex shader compilation failure: " << infoLog << std::endl;
}
success = 0;
glGetShaderiv(frag, GL_COMPILE_STATUS, &success);
if (!success) {
	glGetShaderInfoLog(frag, 512, NULL, infoLog);
	std::cout << "Fragment shader compilation failure: " << infoLog << std::endl;
}
glAttachShader(prog, vert);
glAttachShader(prog, frag);
glLinkProgram(prog);
success = 0;
glGetProgramiv(prog, GL_LINK_STATUS, &success);
if (!success) {
	glGetProgramInfoLog(prog, 512, NULL, infoLog);
	std::cout << "Shader linking failure: " << infoLog << std::endl;
}
glDeleteShader(vert);
glDeleteShader(frag);
attrPos = glGetAttribLocation(prog, "vs_Pos");

And here are my actual shaders:

// bland.vert.glsl
#version 330
in vec4 vs_Pos;
out vec3 col;
void main(){
	col = vs_Pos.xyz;
	gl_Position = vs_Pos;
}

// bland.frag.glsl
#version 330
in vec3 col;
layout(location = 0) out vec4 out_Col;
void main(){
	out_Col = vec4(0, 0, 0, 1); //vec4(col, 1);
}

I just can’t figure out what I did wrong. Looking at the output from GLintercept, there doesn’t seem to be any problem with how the various buffer IDs are used either… no compilation/linking errors… no GL_INVALID_OPERATION…
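For reference, the manual check I also sprinkled around the draw calls is roughly this (just a sketch using plain glGetError(); checkGLError is my own little helper, not from any library):

// Drain the GL error queue and print anything found, tagged with a location string.
static void checkGLError(const char* where) {
	for (GLenum err = glGetError(); err != GL_NO_ERROR; err = glGetError()) {
		std::cout << "GL error 0x" << std::hex << err << std::dec << " at " << where << std::endl;
	}
}
// Usage, e.g. right after the draw: checkGLError("glDrawElements");

It stays silent as well.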

Help much appreciated!

Here is a response that may point to the likely (stupid) cause of the error:

… the way you put it suggests that the output contains error-related messages other than the specific one you expect …

For my taste you call glEnable() and glHint() rather early, but that’s probably nothing; none of it should matter for a single triangle anyway … But you left out the winding order (clockwise/counter-clockwise). If the default winding is not counter-clockwise (and both faces aren’t drawn), you may be looking at the not-drawn back side.
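To rule that out, you could force both faces to be drawn, something like this (a quick sketch, assuming no other code touches culling state; culling should be off by default, but it costs nothing to make it explicit):

// Make sure back-face culling can't hide the triangle:
glDisable(GL_CULL_FACE);   // draw both faces; winding order no longer matters
// Or, if you do want culling, at least make the default winding explicit:
glFrontFace(GL_CCW);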

You could leave out the block between
GLuint vao;
… to …
glClearColor(…);
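That is, reduce the setup to roughly this (a sketch; everything else stays at its default):

// Minimal state setup for the test: just the VAO and a clear color.
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glClearColor(0.9f, 0.9f, 0.9f, 1.0f);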

I don’t know how “GLintercept” works. Does it check for errors after each draw call?
If the infoLog is empty, there’s only the draw loop left to test, right?

… hm … the culprit could be that you leave out binding the “GL_ARRAY_BUFFER” before each draw call.
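Something like this in your loop, i.e. re-binding the vertex buffer before setting the attribute pointer (just a sketch reusing your variable names):

glUseProgram(bland_prog.prog);
glBindBuffer(GL_ARRAY_BUFFER, bufPos);   // attribute 0 should read from bufPos
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, NULL);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufIdx);
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, 0);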