glGenBuffers crashes with segfault

glGenBuffers seems to only work when I call it in the same cpp file that I do this in:


        SDL_GLContext _glContext = SDL_GL_CreateContext(_window);

        if(_glContext == 0)
            fatalError("GL Context could not be created.");

        //glewExperimental = GL_TRUE; //This has no effect.
        GLenum glewError = glewInit();
        if (glewError != GLEW_OK)
            fatalError("Could not initialize glew.");
        else
        std::cout << "initialized glew\n"; //This is the text that shows.
      

However, I want to call glGenBuffers in a separate cpp file. The code crashes in that separate file below:


    if (_vboID == 0) //i set this GLuint variable to zero before.
    {
        glGenBuffers(1, &_vboID); //crashes here with segfault
        std::cout << "gen'd buffers!\n";
    }

Debug mode shows a segfault occurring as soon as the program reaches the glGenBuffers line.

Other info I can provide:
SDL_GL_CONTEXT_MAJOR_VERSION: 17
SDL_GL_CONTEXT_MINOR_VERSION: 18
OS: Windows 7
Glew: 1.13.0
SDL2: 2.0.4
Compiler: MinGW32

Please let me know if I’m missing anything in my description. I’ll happily fill in the gaps if it means you can help me solve my problem. Thanks! -Michael

Make sure you are setting the correct major and minor versions (3.3 for example), correct SDL flags and attaching the GL context with SDL_GL_CreateContext!
Then, before you call glGenBuffers, try to print it, like std::cout << glGenBuffers << std::endl; (WITHOUT BRACKETS!!)
If it prints 0, you are calling it before you initialize GLEW, so the function results in a null pointer, so it can cause a segfault.
Let me know!

EDIT:
Compare with my code if you require it:

// Init SDL Library
if (SDL_Init(SDL_INIT_EVERYTHING))
std::cout << "SDL not initialized correctly" << std::endl;

// Setup SDL for OpenGL
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_SHARE_WITH_CURRENT_CONTEXT, 1);
SDL_SetHint(SDL_HINT_RENDER_OPENGL_SHADERS, "1");

// Create window and renderer
window = SDL_CreateWindow("Game Engine", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 1280, 720, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);

// Create the OpenGL context
context = SDL_GL_CreateContext(window);
SDL_GL_MakeCurrent(window, context);

// Init GLEW
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK)
std::cout << "GLEW not initialized correctly" << std::endl;

Thanks FonzTech. I checked and yes, “std::cout << glGenBuffers << std::endl;” is printing a 0! But I’m initializing glew already in my first class, MainGame.cpp. I added the “SDL_GL_MakeCurrent(_window, _glContext);” line though, to no avail.

So somehow I have to make my Sprite.cpp file see the GL context created in my MainGame.cpp file. The context and window get created just fine in MainGame. So out of desperation I passed the context and window to Sprite and added this to Sprite.cpp:


std::cout << SDL_GL_MakeCurrent(window, context) << std::endl;

SDL_GL_MakeCurrent returns 0. So that’s good. glGenBuffers still crashes when I don’t do the cout you suggested though.

You mentioned something about using correct SDL flags. Can you elaborate please?

Here’s my renderer and OpenGL info:
Renderer: AMD Radeon HD 7800 Series
OpenGL: Version 4.4
OpenGL Driver version: 16.150.211.0 (pressing the “check for updated drivers” link in GLView leads to a server timeout page… every time.)

EDIT: I might as well just throw my whole code in there for you guys to see:

MainGame.cpp


#include "MainGame.h"
#include <iostream>


    // Default-construct the game: no window yet, a 1024x768 target
    // resolution, and the state machine starting in PLAY.
    MainGame::MainGame()
    {
        _window = nullptr;
        _gameState = PLAY;
        _SCREEN_WIDTH = 1024;
        _SCREEN_HEIGHT = 768;
    }

    // Destructor: intentionally empty -- this class does not own any
    // resource it releases here (the SDL window/context are never torn
    // down by this code as shown).
    MainGame::~MainGame()
    {
    };

    // Report an unrecoverable error and wait for the user to press a key.
    // NOTE(review): despite the name, this does not terminate the program
    // or call SDL_Quit(); callers keep running afterwards -- consider
    // exiting here.
    void fatalError(std::string errorString)
    {
        std::cout << errorString << std::endl;
        std::cout << "Enter any key to quit..." << std::endl;
        // BUG FIX: the original read an int (`std::cin >> tmp`), so typing
        // any non-digit -- which "any key" invites -- left the stream in a
        // fail state. Reading a single character matches the prompt.
        std::cin.get();
//        SDL_QUIT;
    }

    void MainGame::run()
    {
        MainGame::initSystems();
        MainGame::gameLoop();
    };

    // Initialize SDL, create the window + GL context, and bring up GLEW.
    void MainGame::initSystems()
    {
        // Check SDL_Init's result instead of ignoring it.
        if (SDL_Init(SDL_INIT_EVERYTHING) != 0)
            fatalError("SDL could not be initialized.");

        // BUG FIX: GL attributes only affect contexts created AFTER the
        // call. The original set the version/doublebuffer attributes
        // after SDL_GL_CreateContext(), where they had no effect.
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
        SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

        _window = SDL_CreateWindow("Game Engine", SDL_WINDOWPOS_CENTERED
                                   ,SDL_WINDOWPOS_CENTERED, _SCREEN_WIDTH
                                   , _SCREEN_HEIGHT, SDL_WINDOW_OPENGL);
        if (_window == 0)
            fatalError("SDL Window could not be opened.");

        SDL_GLContext _glContext = SDL_GL_CreateContext(_window);
        if (_glContext == 0)
            fatalError("GL Context could not be created.");
        SDL_GL_MakeCurrent(_window, _glContext);

        // glewExperimental must be set before glewInit() so GLEW resolves
        // entry points even when they are not advertised in GL_EXTENSIONS.
        glewExperimental = GL_TRUE;
        GLenum glewError = glewInit();
        if (glewError != GLEW_OK)
            fatalError("Could not initialize glew.");
        else
            std::cout << "initialized glew\n"; // was a raw newline inside the literal (paste artifact)

        glClearColor(0.0f, 0.0f, 1.0f, 0.0f);

        _sprite.init(-1, -1, 1, 1, _window, _glContext);
    };

    // Drain the SDL event queue and update the game state accordingly.
    void MainGame::processInput()
    {
        SDL_Event evnt;

        // SDL_PollEvent returns 1 while events remain, 0 when empty.
        while (SDL_PollEvent(&evnt))
        {
            switch(evnt.type)
            {
            case SDL_QUIT:
                _gameState = EXIT;
                break; // BUG FIX: was falling through into SDL_MOUSEMOTION,
                       // printing uninitialized union fields on quit.
            case SDL_MOUSEMOTION:
                std::cout << evnt.motion.x << "   " << evnt.motion.y << "\n";
                break;
            }
        }
    };

    // Clear the framebuffer for a new frame. Order matters: glClearDepth
    // only sets the value used by the following glClear.
    void MainGame::drawGame()
    {
        glClearDepth(1.0);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    }

    // Main loop: render, handle input, then present the frame, until the
    // state is switched to EXIT (currently only by SDL_QUIT).
    void MainGame::gameLoop()
    {
        for (;;)
        {
            if (_gameState == EXIT)
                break;
            drawGame();
            processInput();
            SDL_GL_SwapWindow(_window);
        }
    }

Sprite.cpp:


#include "Sprite.h"

// A sprite starts with no GPU-side vertex buffer; one is created lazily
// in init().
Sprite::Sprite()
    : _vboID(0)
{
}

// Release the vertex buffer if init() ever created one; the guard
// mirrors the lazy creation (buffer id 0 means "never allocated").
Sprite::~Sprite()
{
    if (_vboID)
        glDeleteBuffers(1, &_vboID);
}


// Position the sprite at (x, y) with size (w, h) and upload its two
// triangles into a lazily created VBO. The window/context parameters
// let this translation unit make the caller's GL context current before
// issuing GL calls.
void Sprite::init(float x, float y, float w, float h, SDL_Window *window, SDL_GLContext context)
{
    _x = x;
    _y = y;
    _w = w;
    _h = h;

    // Make the caller's context current on this thread (prints 0 on
    // success). If glGenBuffers segfaults past this point, the loader
    // was not initialized for this TU -- e.g. GLEW_STATIC missing when
    // linking GLEW statically, leaving the function pointer null.
    std::cout << "MakeCurrent result: " << SDL_GL_MakeCurrent(window, context) << std::endl;

    // NOTE(review): the SDL_GL_SetAttribute(SDL_GL_CONTEXT_*_VERSION)
    // calls that sat here were removed -- attributes only affect
    // contexts created AFTER the call, so they did nothing at this point.

    if (_vboID == 0)
    {
        glGenBuffers(1, &_vboID);
        std::cout << "gen'd buffers!\n"; // was a raw newline inside the literal
    }

    // Two triangles covering the quad (x, y)..(x + w, y + h):
    // 6 vertices * 2 floats (x, y) each.
    float vertData[12] = {
        x + w, y + h,  // first triangle
        x,     y + h,
        x,     y,
        x,     y,      // second triangle
        x + w, y,
        x + w, y + h,
    };

    glBindBuffer(GL_ARRAY_BUFFER, _vboID);
    // STATIC_DRAW tells the GPU we're only going to upload this data once.
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertData), vertData, GL_STATIC_DRAW);
    // Unbind so later code cannot scribble over this buffer by accident.
    glBindBuffer(GL_ARRAY_BUFFER, 0);
}

// Draw the sprite: bind its VBO, describe the vertex layout, and issue
// one draw call for the two triangles uploaded in init().
void Sprite::draw()
{
    glBindBuffer(GL_ARRAY_BUFFER, _vboID);

    // Only the position attribute is used, so enable array 0 only.
    glEnableVertexAttribArray(0);
    // Attribute 0: 2 floats per vertex, tightly packed (stride 0),
    // starting at offset 0 of the currently bound VBO.
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
    // 6 vertices = 2 triangles; each vertex is the 2 floats declared
    // above, so OpenGL reads 12 floats total from the buffer.
    glDrawArrays(GL_TRIANGLES, 0, 6);

    glDisableVertexAttribArray(0);

    // Leave no buffer bound so other code starts from a clean state.
    glBindBuffer(GL_ARRAY_BUFFER, 0);

}

i have tried also your code, it works as intended
if you are statically linking glew, you must define GLEW_STATIC and set glewExperimental to true



// BUG FIX: GLEW_STATIC must be defined BEFORE <GL/glew.h> is included;
// the original snippet defined it after the include, where it has no
// effect. With a statically linked GLEW the define is required so the
// header does not declare the entry points as dllimport on Windows.
#define GLEW_STATIC
#include <GL/glew.h>
#include <SDL2/SDL.h>
#include <iostream>


// Minimal SDL2 + GLEW example: create a 3.3 core-profile context,
// upload one triangle into a VAO/VBO pair, and draw until the window
// is closed.
int main(int argc, char* argv[])
{

	// Init SDL Library
	if (SDL_Init(SDL_INIT_EVERYTHING))
	{
		std::cout << "SDL not initialized correctly" << std::endl;
		exit(-1);
	}

	// Setup SDL for OpenGL. Attributes must be set before the window
	// and context are created to have any effect.
	SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
	SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
	SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
	SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
	SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
	SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
	SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
	SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
	SDL_GL_SetAttribute(SDL_GL_SHARE_WITH_CURRENT_CONTEXT, 1);
	SDL_SetHint(SDL_HINT_RENDER_OPENGL_SHADERS, "1");

	// Create the window (robustness: fail loudly instead of segfaulting
	// on a null window later).
	SDL_Window* window = SDL_CreateWindow("Game Engine", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 1280, 720, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);
	if (window == nullptr)
	{
		std::cout << "Window could not be created" << std::endl;
		exit(-1);
	}

	// Create the OpenGL context and make it current on this thread.
	SDL_GLContext context = SDL_GL_CreateContext(window);
	if (context == nullptr)
	{
		std::cout << "GL context could not be created" << std::endl;
		exit(-1);
	}
	SDL_GL_MakeCurrent(window, context);

	// Init GLEW. glewExperimental makes GLEW resolve entry points even
	// when they are not advertised in GL_EXTENSIONS (needed on core
	// profiles).
	glewExperimental = GL_TRUE;
	if (glewInit() != GLEW_OK)
	{
		std::cout << "GLEW not initialized correctly" << std::endl;
		exit(-1);
	}

	bool running = true;

	// One triangle, 3 floats (x, y, z) per vertex.
	float vertices[] = {
		0, 0, 0, 
		1, 0, 0,
		0, 1, 0,
	};

	unsigned int vertexarray, buffer;
	glGenVertexArrays(1, &vertexarray);
	glBindVertexArray(vertexarray);

	glGenBuffers(1, &buffer);
	glBindBuffer(GL_ARRAY_BUFFER, buffer);
	glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

	// Attribute 0: 3 tightly-packed floats per vertex. Use GL_FALSE
	// (GLboolean) rather than C++ `false` for the normalized parameter.
	glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 3, 0);
	glEnableVertexAttribArray(0);

	glBindVertexArray(0);

	while (running)
	{
		SDL_Event e;
		while (SDL_PollEvent(&e))
		{
			if (e.type == SDL_QUIT)
			{
				running = false;
				break;
			}
		}

		glClearColor(0, 0, 1, 0);
		glClear(GL_COLOR_BUFFER_BIT);

		glBindVertexArray(vertexarray);
		glDrawArrays(GL_TRIANGLES, 0, 3);
		glBindVertexArray(0);

		SDL_GL_SwapWindow(window);
	}

	// Tear down in reverse order: GL objects, window, then SDL itself.
	glDeleteVertexArrays(1, &vertexarray);
	glDeleteBuffers(1, &buffer);

	SDL_DestroyWindow(window);

	SDL_Quit();

	return 0;
}

John Connor, your DEFINE GLEW_STATIC comment solved it!! Thank you thank you thank you thank you!!!

So that’s why the code worked when I moved it all to MainGame.cpp; I’d already defined GLEW_STATIC in MainGame.h. I hadn’t yet defined it in Sprite.h. So when I defined it there too, it worked!

Thanks for helping me get past this obstacle everyone!

That’s not actually the case.

You can cross-check this with the GLEW source code if you wish; glewExperimental actually just relaxes the strictness for extension checking, so that an extension can be reported as available if its entry points are present but it's not in the GL_EXTENSIONS string. So:

/* Quoted from GLEW's generated glew.c: resolves every entry point of
 * GL_ARB_vertex_buffer_object via glewGetProcAddress. r accumulates
 * failures -- it becomes GL_TRUE (nonzero) if ANY pointer came back
 * NULL, so the caller treats a nonzero return as "init failed". */
static GLboolean _glewInit_GL_ARB_vertex_buffer_object (GLEW_CONTEXT_ARG_DEF_INIT)
{
  GLboolean r = GL_FALSE;

  r = ((glBindBufferARB = (PFNGLBINDBUFFERARBPROC)glewGetProcAddress((const GLubyte*)"glBindBufferARB")) == NULL) || r;
  r = ((glBufferDataARB = (PFNGLBUFFERDATAARBPROC)glewGetProcAddress((const GLubyte*)"glBufferDataARB")) == NULL) || r;
  r = ((glBufferSubDataARB = (PFNGLBUFFERSUBDATAARBPROC)glewGetProcAddress((const GLubyte*)"glBufferSubDataARB")) == NULL) || r;
  r = ((glDeleteBuffersARB = (PFNGLDELETEBUFFERSARBPROC)glewGetProcAddress((const GLubyte*)"glDeleteBuffersARB")) == NULL) || r;
  r = ((glGenBuffersARB = (PFNGLGENBUFFERSARBPROC)glewGetProcAddress((const GLubyte*)"glGenBuffersARB")) == NULL) || r;
  r = ((glGetBufferParameterivARB = (PFNGLGETBUFFERPARAMETERIVARBPROC)glewGetProcAddress((const GLubyte*)"glGetBufferParameterivARB")) == NULL) || r;
  r = ((glGetBufferPointervARB = (PFNGLGETBUFFERPOINTERVARBPROC)glewGetProcAddress((const GLubyte*)"glGetBufferPointervARB")) == NULL) || r;
  r = ((glGetBufferSubDataARB = (PFNGLGETBUFFERSUBDATAARBPROC)glewGetProcAddress((const GLubyte*)"glGetBufferSubDataARB")) == NULL) || r;
  r = ((glIsBufferARB = (PFNGLISBUFFERARBPROC)glewGetProcAddress((const GLubyte*)"glIsBufferARB")) == NULL) || r;
  r = ((glMapBufferARB = (PFNGLMAPBUFFERARBPROC)glewGetProcAddress((const GLubyte*)"glMapBufferARB")) == NULL) || r;
  r = ((glUnmapBufferARB = (PFNGLUNMAPBUFFERARBPROC)glewGetProcAddress((const GLubyte*)"glUnmapBufferARB")) == NULL) || r;

  return r;
}

And:

  GLEW_ARB_vertex_buffer_object = _glewSearchExtension("GL_ARB_vertex_buffer_object", extStart, extEnd);
  if (glewExperimental || GLEW_ARB_vertex_buffer_object) GLEW_ARB_vertex_buffer_object = !_glewInit_GL_ARB_vertex_buffer_object(GLEW_CONTEXT_ARG_VAR_INIT);

(both from glew.c)

So it’s nothing to do with statically linking to GLEW, and setting glewExperimental if statically linking is just voodoo programming.

Don’t forget that if you put GLEW_STATIC in your code before including glew.h, you must link your project against glew32s.lib (with the letter S after glew32)!!