GL_INVALID_VALUE error generated. Program handle does not refer to an object generated by OpenGL

This is the other error that I get

GL CALLBACK: ** GL ERROR ** type = 0x824c, severity = 0x9146, message = GL_INVALID_VALUE error generated. Program handle does not refer to an object generated by OpenGL.

This is what I use to test the error


#pragma once
#include "GL/glew.h"
#include <iostream>

// ASSERT: break into the debugger when the condition fails (MSVC intrinsic).
#define ASSERT(x) if (!(x)) __debugbreak();
// GLCall: clear any stale GL errors, execute the wrapped GL call, then assert
// that it produced no new errors.
// BUG in the pasted version: the `x;` line was missing, so the wrapped call
// was never executed at all — only the error clearing and the check ran.
#define GLCall(x) GLClearError();\
    x;\
    ASSERT(GLLogCall(#x, __FILE__, __LINE__))

static void GLClearError() {
    while (!glGetError()) {


// Report the first pending GL error together with the call site.
// @param function  stringified GL call (from the GLCall macro's #x)
// @param file/line call-site location (__FILE__ / __LINE__)
// @return false if an error was pending, true otherwise.
// NOTE: braces were lost in the forum paste; restored here. The early
// `return false` inside the loop means only the first error is printed.
static bool GLLogCall(const char* function, const char* file, int line) {
    while (GLenum error = glGetError()) {
        std::cout << "Error " << error << " " << function << " " << file << ":" << line << "\n";
        return false;
    }
    return true;
}

// Print every pending GL error without asserting; used for manual probing.
// NOTE: braces were lost in the forum paste; restored here.
static void GLGetError() {
    bool haserr = false;
    // First check: remember whether any error was pending at all.
    if (GLenum error = glGetError()) {
        haserr = true;
        std::cout << "My Error " << error << "\n";
    }
    // Drain and print any remaining queued errors.
    while (GLenum error = glGetError()) {
        std::cout << "My Error " << error << "\n";
    }
    if (haserr) {
        int non = 0;    // no-op: a line to set a breakpoint on when an error occurred
    }
}
here is the code that must have triggered it

#include "Mesh.h"

// Construct a textured mesh. GL objects are NOT created here — init() must
// be called (with a current GL context) before render().
// NOTE: braces were lost in the forum paste; restored here.
Mesh::Mesh(vector<Vertex> inverts, vector<GLuint> ininds, vector<MatTexture> textu)
{
	mVertices = inverts;
	indices = ininds;
	textures = textu;
	notex = false;	// material uses textures, not a flat colour
}

// Construct an untextured mesh that is shaded with flat material colours.
// GL objects are NOT created here — init() must be called before render().
// NOTE: braces were lost in the forum paste; restored here.
Mesh::Mesh(vector<Vertex> inverts, vector<GLuint> ininds, aiColor4D diff, aiColor4D spec)
{
	mVertices = inverts;
	indices = ininds;
	diffuse = diff;
	specular = spec;
	notex = true;	// render() uploads diffuse/specular instead of binding textures
}

    // NOTE(review): the enclosing function header was lost in the forum paste —
    // presumably this is Mesh::~Mesh() releasing the GL objects; confirm
    // against the original file. If it is the destructor, note that Mesh has
    // no copy/move control (Rule of Five): copying a Mesh and destroying the
    // copy deletes these shared GL handles, invalidating the original — the
    // classic cause of "does not refer to an object generated by OpenGL".
    glDeleteVertexArrays(1, &mVAO);
    glDeleteBuffers(1, &mVBO);
	glDeleteBuffers(1, &mIBO);

void Mesh::init()
	glGenVertexArrays(1, &mVAO);

	glGenBuffers(1, &mVBO);
	glBindBuffer(GL_ARRAY_BUFFER, mVBO);

	glBufferData(GL_ARRAY_BUFFER, mVertices.size() * sizeof(Vertex), &mVertices[0], GL_STATIC_DRAW);

	// Vertex Positions

	glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*)0);

	// Vertex Normals

	glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*)(3 * sizeof(GLfloat)));

	// Vertex Texture Coords

	glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid*)(6 * sizeof(GLfloat)));

    glVertexAttribIPointer(3, 4, GL_UNSIGNED_INT, sizeof(Vertex), (GLvoid*)(8 * sizeof(GLfloat)));
    glVertexAttribPointer(4, 4, GL_FLOAT, GL_FALSE, sizeof(Vertex),
		(GLvoid*)((8 * sizeof(GLfloat)) + (4 * sizeof(GLuint))));

	glGenBuffers(1, &mIBO);
	glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(GLuint), &indices[0], GL_STATIC_DRAW);


	hasInit = true;

// Draws the mesh with the given shader.
// NOTE(review): this snippet is badly mangled by the forum paste — braces are
// missing and the switch statement below is incomplete ("switch ( {"), so the
// original control flow can only be guessed at. Comments describe only what
// the surviving lines show.
void Mesh::render(ShaderProgram shader)
	shader.setUniform("notex", notex);
	if (notex) {
		// Untextured path: upload the material's diffuse colour.
		glm::vec4 mDiff = glm::vec4(diffuse.r, diffuse.g, diffuse.b, 1);
		shader.setUniform("diffcol", mDiff);
		shader.setUniformSampler("difftex", 0);
		// NOTE(review): iterating textures under the notex branch looks wrong —
		// this loop presumably belongs in an else branch; confirm against the
		// original file.
		unsigned int diffuseIdx = 0;
		unsigned int normalIdx = 0;
		for (unsigned int i = 0; i < textures.size(); i++) {
			string name;
			// Incomplete in the paste: presumably switches on the texture type
			// to build the sampler uniform name.
			switch ( {
			case aiTextureType_DIFFUSE:
				name = "diffuse" + to_string(diffuseIdx++);
			case aiTextureType_NORMALS:
				name = "normal" + to_string(normalIdx++);
	// Only draw once the GL objects exist (init() has run).
	if (hasInit) {
		if (nfirst) {
			int non = 0;	// no-op: breakpoint anchor for the first re-draw
		glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_INT, nullptr);
		nfirst = true;

	// NOTE(review): stray `;` makes this loop a no-op as pasted.
	for (unsigned int i = 0; i < textures.size(); i++);

You’re calling some GL function taking a program object handle as input, and you’re not feeding it correctly.

No, it’s obviously not. You’re apparently getting the above detailed GL error message from a glDebugMessageCallback() that your program installed.

Use it to isolate which GL function you’re calling with invalid arguments. Try running in a debugger for instance.

Your code snippet only shows your shader program wrappers, not GL calls. So you’re on your own to determine which embedded GL call you called with a bogus argument (or arguments).

OK, so the message might have come from a different function. However, the error I got was reported at glBindVertexArray(mVAO), because GLCall — as I was told — first clears the error queue and then checks for new errors, so the error must come from that call. I do have the handles declared in the header: GLuint mVAO, mVBO, mIBO; I can’t figure out what I did wrong.


#pragma once
#include "Texture2D.h"
#include "ShaderProgram.h"
#include "GL/glew.h"	// Important - this header must come before glfw3 header
#include "GLFW/glfw3.h"

#include "glm/glm.hpp"
#include <vector>
#include "Error.h"
using namespace std;
// Per-vertex data uploaded to the GPU. The field order/offsets must match the
// glVertexAttribPointer calls in Mesh::init().
// NOTE: the closing `};` was lost in the forum paste; restored here.
struct Vertex {
    // position (attribute 0)
    glm::vec3 Position;
    // normal (attribute 1)
    glm::vec3 Normal;
    // texCoords (attribute 2)
    glm::vec2 TexCoords;

    // //tangent
    //glm::vec3 Tangent;
    // //bitangent
    //glm::vec3 Bitangent;

    // bone indexes which will influence this vertex (attribute 3, integer)
    unsigned int m_BoneIDs[MAX_BONE_INFLUENCE];
    // weights from each bone (attribute 4)
    float m_Weights[MAX_BONE_INFLUENCE];
};

// Per-bone data used for skeletal animation.
// NOTE: braces were lost in the forum paste; restored here.
struct BoneInfo
{
    unsigned int id;        // bone index
    glm::mat4 offset;       // bone offset matrix
};

class Mesh
    Mesh(vector<Vertex> inverts, vector<GLuint> ininds, vector<MatTexture> textu);
    Mesh(vector<Vertex> inverts, vector<GLuint> ininds, aiColor4D diff, aiColor4D spec);
    void init();
    void render(ShaderProgram shader);

    bool nfirst = false;
    bool hasInit = false;
    aiColor4D diffuse;
    aiColor4D specular;
    bool notex = false;
    GLuint mVAO, mVBO, mIBO;
    vector<Vertex> mVertices;
    vector<GLuint> indices;
    vector<MatTexture> textures;

This all seems fine, so it doesn’t make sense that the error occurs.

If you use classes to manage OpenGL resources and they do not have a full set of copy and/or move constructors and assignment operators (even if some of those are marked = delete), it is very easy to run into the issues described on the OpenGL wiki (objects being deleted through an implicitly copied instance).

I found out what was wrong: I was calling init() before the model data was actually loaded (it needs to run after loading the model), and I also hadn’t set up the shader program before using it.