Vertex buffer objects not shared between glx contexts.

#1

Hi!
For rendering to an FBO, I needed to create separate GLX contexts, otherwise it doesn’t work.

So I created a first hidden context that shares all OpenGL resources (textures, etc…) with the other GLX contexts.

The code looks like this :

First I created an interface for compatibility with other libraries.


#ifndef ODFAEG_ICONTEXT_HPP
#define ODFAEG_ICONTEXT_HPP
#include "contextSettings.hpp"
#include <SFML/Window/WindowHandle.hpp>
namespace odfaeg {
    namespace window {
        // Abstract OpenGL context interface so the rest of the engine can stay
        // agnostic of the windowing/context backend (SFML, native GLX/WGL, ...).
        class IContext {
            public :
            // A polymorphic base that is deleted through a base pointer
            // (ContextImpl does `delete sharedContext;` on an IContext*)
            // must have a virtual destructor, otherwise that delete is UB.
            virtual ~IContext() = default;
            // Makes (active == true) or releases (active == false) this context
            // current on the calling thread; returns false on failure.
            virtual bool setActive(bool active) = 0;
            // Creates a context attached to an existing native window, optionally
            // sharing resources (textures, buffers, ...) with sharedContext.
            virtual void create(sf::WindowHandle handle, IContext* sharedContext = nullptr) = 0;
            // Creates an off-screen context of the given size; `settings` may be
            // adjusted to reflect what was actually obtained.
            virtual void create(ContextSettings& settings, unsigned int width, unsigned int height, IContext* sharedContext = nullptr) = 0;
            // Settings actually used by the created context.
            virtual const ContextSettings& getSettings() const = 0;
            // Swaps the front/back buffers.
            virtual void display() = 0;
            virtual void setVerticalSyncEnabled(bool enabled) = 0;
        };
    }
}
#endif

Then I created a second class to create offscreen Contexts (for any libraries)
The ContextFactory creates a context depending on the chosen library.


#include "../../../include/odfaeg/Window/context.hpp"
namespace odfaeg {
    namespace window {
        // Default construction: no backend context yet; create() must be
        // called before display()/setActive() have any effect.
        Context::Context() :
        m_context(nullptr) {
        }
        // Creates an off-screen context of the given size via the library-chosen
        // backend and immediately makes it current on this thread.
        Context::Context(ContextSettings settings, unsigned int width, unsigned int height) {
            m_context = ContextFactory::create();
            m_context->create(settings, width, height);
            m_context->setActive(true);
        }
        // Creates a context attached to an existing native window and
        // immediately makes it current on this thread.
        Context::Context(sf::WindowHandle handle) {
            m_context = ContextFactory::create();
            m_context->create(handle);
            m_context->setActive(true);
        }
        // (Re)creates the off-screen backend context and makes it current.
        void Context::create(ContextSettings settings, unsigned int width, unsigned int height) {
            // Destroy any previous backend context before creating a new one.
            // The old check was inverted (`if (!m_context) delete m_context;`):
            // it never freed an existing context, leaking it on every recreate.
            // `delete` on nullptr is a safe no-op, so no check is needed.
            delete m_context;
            m_context = ContextFactory::create();
            m_context->create(settings, width, height);
            m_context->setActive(true);
        }
        // (Re)creates the backend context on an existing native window and
        // makes it current.
        void Context::create(sf::WindowHandle handle) {
            // Same inverted-check leak as the off-screen overload: the previous
            // `if (!m_context) delete m_context;` never freed a live context.
            delete m_context;
            m_context = ContextFactory::create();
            m_context->create(handle);
            m_context->setActive(true);
        }
        // Swaps the buffers of the backend context, if one has been created.
        void Context::display() {
            // Guard against a default-constructed Context that was never
            // create()d, consistent with setVerticalSyncEnabled()/setActive().
            if (m_context)
                m_context->display();
        }
        // Forwards the v-sync request to the backend; no-op when no backend
        // context exists yet.
        void Context::setVerticalSyncEnabled(bool enabled) {
            if (m_context)
                m_context->setVerticalSyncEnabled(enabled);
        }
        // Makes the backend context current (true) or releases it (false).
        // Returns false when there is no backend context or activation fails.
        bool Context::setActive(bool active) {
            // Forward the requested state; the previous code always passed
            // `true`, so setActive(false) could never release the context.
            return m_context && m_context->setActive(active);
        }
    }
}

The third class is a class I use to create context with my library. (This is where I create the hidden shared context)
This class inherits from ContextImplType (a typedef depending on the platform: if the platform is Linux it inherits from GlxContext, if it’s Windows it inherits from WglContext, etc…)


#include "../../../include/odfaeg/Window/contextImpl.hpp"
#include <SFML/System/Mutex.hpp>
#include <SFML/System/Lock.hpp>
#include <iostream>
using namespace sf;
namespace {
    ThreadLocalPtr<odfaeg::window::ContextImpl> current_ContextImpl (nullptr);
    sf::Mutex mutex;
}
namespace odfaeg {
    namespace window {
        using namespace sf;
        IContext* ContextImpl::sharedContext = nullptr;
        unsigned int ContextImpl::nbContexts = 0;
        ContextImpl::ContextImpl() {
            // The very first context creates the hidden 1x1 "shared" context
            // that owns the OpenGL resources shared by all other contexts.
            if (nbContexts == 0) {
                sharedContext = new ContextImplType();
                ContextSettings settings;
                sharedContext->create(settings, 1, 1);
                sharedContext->setActive(true);
            }
            // Count EVERY context, not only the first one: the destructor
            // decrements unconditionally, so incrementing only inside the `if`
            // made the counter hit zero while contexts were still alive and
            // destroyed the shared context prematurely.
            nbContexts++;
        }
        // Creates an off-screen context that shares resources with the hidden
        // global context.
        // NOTE(review): the `shared` argument is ignored; every context is
        // forced to share with `sharedContext` instead — confirm this is intended.
        void ContextImpl::create(ContextSettings& settings, unsigned int width, unsigned int height, IContext* shared) {
            // The shared context must not stay current while a context sharing
            // with it is being created.
            sharedContext->setActive(false);
            ContextImplType::create(settings, width, height, sharedContext);
        }
        // Creates a window-bound context that shares resources with the hidden
        // global context.
        // NOTE(review): as in the off-screen overload, the `shared` argument is
        // ignored in favour of the global sharedContext.
        void ContextImpl::create(sf::WindowHandle handle, IContext* shared) {
            // Deactivate the shared context before creating the sharing context.
            sharedContext->setActive(false);
            ContextImplType::create(handle, sharedContext);
        }
        // Makes this context current (or releases it) on the calling thread,
        // tracking the per-thread current context to skip redundant switches.
        // Returns false only when the platform-level switch fails.
        bool ContextImpl::setActive(bool active) {
            if (active) {
                // Already current on this thread: nothing to do.
                if (this == current_ContextImpl)
                    return true;
                // Serialize platform-level activation. The previous code locked
                // only on this path and left deactivation unguarded; both paths
                // now take the same mutex. current_ContextImpl itself is
                // thread-local and needs no lock.
                Lock lock(mutex);
                if (ContextImplType::setActive(true)) {
                    current_ContextImpl = this;
                    return true;
                }
                return false;
            } else {
                // Not current on this thread: nothing to release.
                if (this != current_ContextImpl)
                    return true;
                Lock lock(mutex); // mirror the activation path
                if (ContextImplType::setActive(false)) {
                    current_ContextImpl = nullptr;
                    return true;
                }
                return false;
            }
        }
        // Settings actually granted by the platform implementation.
        const ContextSettings& ContextImpl::getSettings() const {
            return ContextImplType::getSettings();
        }
        // Swaps the buffers via the platform implementation.
        void ContextImpl::display() {
            ContextImplType::display();
        }
        // Not implemented yet; present only to satisfy the IContext interface.
        void ContextImpl::setVerticalSyncEnabled (bool enabled) {
            //For later;
        }
        // The last living context tears down the hidden shared context.
        ContextImpl::~ContextImpl() {
            nbContexts--;
            if (nbContexts == 0) {
                sharedContext->setActive(false);
                delete sharedContext;
                // Reset the pointer so a later "first" context starts from a
                // clean state instead of observing a dangling pointer.
                sharedContext = nullptr;
            }
        }
    }
}

And the last class is the platform-specific OpenGL context creation I use for my library. (GlxContext here)


#include "../../../include/odfaeg/Window/glxContext.hpp"
#include "../../../include/odfaeg/Window/display.hpp"
#include <cstring>
#include <iostream>
typedef GLXContext (*glXCreateContextAttribsARBProc)(::Display*, GLXFBConfig, GLXContext, Bool, const int*);
typedef Bool (*glXMakeContextCurrentARBProc)(::Display*, GLXDrawable, GLXDrawable, GLXContext);
namespace odfaeg {
    namespace window {
        bool GlxContext::ctxErrorOccurred = false;
        // Helper to check for extension string presence.  Adapted from:
        //   http://www.opengl.org/resources/features/OGLextensions/
        // Returns true when `extension` appears as a whole, space-delimited
        // word inside the extension list `extList`. Adapted from:
        //   http://www.opengl.org/resources/features/OGLextensions/
        bool GlxContext::isExtensionSupported(const char *extList, const char *extension)
        {
          // Extension names never contain spaces; reject empty or malformed
          // queries up front.
          if (*extension == '\0' || strchr(extension, ' ') != nullptr)
            return false;

          const size_t extLen = strlen(extension);
          const char *cursor = extList;

          // Walk every occurrence of the name and keep only whole-word hits,
          // so e.g. "GLX_ARB_foo" does not match inside "GLX_ARB_foobar".
          while (const char *hit = strstr(cursor, extension))
          {
            const char *end = hit + extLen;
            const bool startsWord = (hit == cursor) || (hit[-1] == ' ');
            const bool endsWord   = (*end == ' ') || (*end == '\0');
            if (startsWord && endsWord)
              return true;
            cursor = end;
          }

          return false;
        }
        // X error handler installed around context creation: records that an
        // error occurred instead of letting Xlib abort the process.
        int GlxContext::ctxErrorHandler(::Display *dpy, XErrorEvent *ev )
        {
            ctxErrorOccurred = true;
            return 0;
        }
        // Each context keeps its own connection to the X server. The context
        // starts window-less (off-screen) until create() attaches a drawable.
        GlxContext::GlxContext() {
            m_display = Display::openDisplay();
            m_windowLess = true;
            pbuf = 0;
            ctx = nullptr;
        }
        // Picks the X visual that best matches the requested settings.
        // Returns an Xlib-allocated XVisualInfo the caller owns.
        XVisualInfo* GlxContext::getBestVisual(const ContextSettings& requested_settings) {
            ::Display* display = Display::openDisplay();
            // Not `static`: the values depend on requested_settings, and a
            // static array would keep the values of the first call forever.
            int visual_attribs[] =
            {
              // NOTE(review): GLX_CONTEXT_MAJOR/MINOR_VERSION_ARB are context
              // *creation* attributes, not FBConfig attributes; drivers may
              // reject or ignore them in glXChooseFBConfig — confirm and drop.
              GLX_CONTEXT_MAJOR_VERSION_ARB, requested_settings.versionMajor,
              GLX_CONTEXT_MINOR_VERSION_ARB, requested_settings.versionMinor,
              GLX_X_RENDERABLE    , True,
              GLX_DRAWABLE_TYPE   , GLX_WINDOW_BIT,
              GLX_RENDER_TYPE     , GLX_RGBA_BIT,
              GLX_X_VISUAL_TYPE   , GLX_TRUE_COLOR,
              GLX_RED_SIZE        , 8,
              GLX_GREEN_SIZE      , 8,
              GLX_BLUE_SIZE       , 8,
              GLX_ALPHA_SIZE      , 8,
              GLX_DEPTH_SIZE      , requested_settings.depthBits,
              GLX_STENCIL_SIZE    , requested_settings.stencilBits,
              GLX_DOUBLEBUFFER    , True,
              GLX_SAMPLE_BUFFERS  , (requested_settings.antiAliasingLevel > 0) ? 1 : 0,
              GLX_SAMPLES         , requested_settings.antiAliasingLevel,
              None
            };
            int glx_major, glx_minor;
            // FBConfigs were added in GLX version 1.3.
            if ( !glXQueryVersion( display, &glx_major, &glx_minor ) ||
                   ( ( glx_major == 1 ) && ( glx_minor < 3 ) ) || ( glx_major < 1 ) )
            {
                std::cerr<<"Invalid GLX version"<<std::endl;
                exit(1);
            }
            int fbcount;
            GLXFBConfig* fbc = glXChooseFBConfig(display, DefaultScreen(display), visual_attribs, &fbcount);
            if (!fbc)
            {
                std::cerr<<"Failed to retrieve a framebuffer config"<<std::endl;
                exit(1);
            }
            // Score every config; fbcount is a signed int, so iterate with int.
            int best_fbc = -1, bestScore = 0, bestDepthBits=0, bestStencilBits = 0, bestAntiAliasingLevel = 0;
            for (int i=0; i<fbcount; ++i)
            {
                XVisualInfo *vi = glXGetVisualFromFBConfig( display, fbc[i] );
                if ( vi )
                {
                    int depthBits=0, stencilBits=0, antiAliasingLevel=0;
                    glXGetConfig(display,vi,GLX_DEPTH_SIZE, &depthBits);
                    glXGetConfig(display,vi,GLX_STENCIL_SIZE, &stencilBits);
                    glXGetConfig(display,vi,GLX_SAMPLES, &antiAliasingLevel);
                    // NOTE(review): this weighting keeps the original behavior;
                    // it prefers configs far from the current best, which looks
                    // inverted compared to SFML's "closest match" scoring —
                    // confirm the intent before changing it.
                    int depthBitsDiff = bestDepthBits - depthBits;
                    int stencilBitsDiff = bestStencilBits - stencilBits;
                    int antiAliasingDiff = bestAntiAliasingLevel - antiAliasingLevel;
                    depthBitsDiff *= (depthBitsDiff > 0) ? 10000 : 1;
                    stencilBitsDiff *= (stencilBitsDiff > 0) ? 10000 : 1;
                    antiAliasingDiff *= (antiAliasingDiff > 0) ? 10000 : 1;
                    int score = std::abs(depthBitsDiff) + std::abs(stencilBitsDiff) + std::abs(antiAliasingDiff);
                    if (score > bestScore) {
                        bestScore = score;
                        best_fbc = i;
                        bestDepthBits = depthBits;
                        bestStencilBits = stencilBits;
                        bestAntiAliasingLevel = antiAliasingLevel;
                    }
                    // Freed inside the null check: XFree(NULL) is invalid.
                    XFree(vi);
                }
            }
            // If the scoring never selected a config, fall back to the first
            // one instead of reading fbc[-1] (undefined behavior before).
            if (best_fbc < 0)
                best_fbc = 0;
            // Get a visual for the selected config.
            XVisualInfo* vi = glXGetVisualFromFBConfig(display, fbc[ best_fbc ]);
            // Be sure to free the FBConfig list allocated by glXChooseFBConfig().
            XFree( fbc );
            Display::closeDisplay(display);
            if (!vi) {
                std::cerr<<"Failed to get the best visual, check your driver installation"<<std::endl;
                // NOTE(review): returning a default-constructed XVisualInfo
                // keeps callers alive but hands them a meaningless visual (and
                // mixes new/XFree ownership); consider returning nullptr and
                // handling the failure at the call sites.
                return new XVisualInfo();
            }
            return vi;
        }
        // Creates an off-screen (PBuffer-backed) GLX context of the requested
        // size, sharing GL resources with sharedContext when one is given.
        // `settings` is updated with the values actually obtained.
        void GlxContext::create(ContextSettings& settings, unsigned int width, unsigned int height, IContext* sharedContext) {
            GLXContext shared = (sharedContext != nullptr) ? static_cast<GlxContext*>(sharedContext)->ctx : nullptr;
            XVisualInfo* vi = getBestVisual(settings);
            int depthBits=0, stencilBits=0, antiAliasingLevel=0;
            glXGetConfig(m_display,vi,GLX_DEPTH_SIZE, &depthBits);
            glXGetConfig(m_display,vi,GLX_STENCIL_SIZE, &stencilBits);
            glXGetConfig(m_display,vi,GLX_SAMPLES, &antiAliasingLevel);
            // Report back what the chosen visual actually provides.
            settings.depthBits = depthBits;
            settings.stencilBits = stencilBits;
            settings.antiAliasingLevel = antiAliasingLevel;
            // Find the GLXFBConfig matching the chosen visual. No attribute
            // filter is supplied: we only match on the visual id, since the
            // visual was already deemed suitable by getBestVisual().
            GLXFBConfig* bestFbc = nullptr;
            int nbConfigs = 0;
            GLXFBConfig* configs = glXChooseFBConfig(m_display, DefaultScreen(m_display), NULL, &nbConfigs);
            int bestVersionMajor = 0, bestVersionMinor = 0;
            for (int i = 0; configs && (i < nbConfigs); ++i)
            {
                XVisualInfo* visual = glXGetVisualFromFBConfig(m_display, configs[i]);

                if (!visual)
                    continue;

                if (visual->visualid == vi->visualid)
                {
                    bestFbc = &configs[i];
                    XFree(visual);
                    break;
                }
                // NOTE(review): GLX_CONTEXT_*_VERSION_ARB are not glXGetConfig
                // attributes, and `vi` (not `visual`) is queried here, so these
                // calls cannot report per-config versions — confirm the intent.
                int versionMajor = 0, versionMinor = 0;
                glXGetConfig(m_display,vi,GLX_CONTEXT_MAJOR_VERSION_ARB, &versionMajor);
                glXGetConfig(m_display,vi,GLX_CONTEXT_MINOR_VERSION_ARB, &versionMinor);
                if (versionMajor > bestVersionMajor) {
                    bestVersionMajor = versionMajor;
                    settings.versionMajor = versionMajor;
                }
                if (versionMinor > bestVersionMinor) {
                    bestVersionMinor = versionMinor;
                    settings.versionMinor = versionMinor;
                }
                XFree(visual);
            }
            m_window = DefaultRootWindow(m_display);
            m_windowLess = true;
            glXCreateContextAttribsARBProc glXCreateContextAttribsARB = (glXCreateContextAttribsARBProc)
                glXGetProcAddressARB( (const GLubyte *) "glXCreateContextAttribsARB" );
            glXMakeContextCurrentARBProc glXMakeContextCurrentARB = (glXMakeContextCurrentARBProc)
                glXGetProcAddressARB( (const GLubyte *) "glXMakeContextCurrent");
            // Install an X error handler so the application won't exit if GL 3.0
            // context allocation fails. The handler is process-global: guard
            // against other threads issuing X commands while this runs.
            int (*oldHandler)(::Display*, XErrorEvent*) =
              XSetErrorHandler(&ctxErrorHandler);
            // Get the default screen's GLX extension list.
            const char *glxExts = glXQueryExtensionsString(m_display,
                                                          DefaultScreen(m_display ) );
            // Use the old-style creation path when the ARB entry points are
            // missing OR when no FBConfig matched the visual — `*bestFbc` used
            // to be dereferenced while nullptr in that case.
            if ( !isExtensionSupported( glxExts, "GLX_ARB_create_context") ||
               !glXCreateContextAttribsARB || !glXMakeContextCurrentARB || !bestFbc)
            {
                std::cout<<"glXCreateContextAttribsARB() not found"
                " ... using old-style GLX context"<<std::endl;
                // Pass `shared` here too: the old code passed NULL, silently
                // dropping resource sharing on this path.
                ctx = glXCreateContext(m_display, vi, shared, GL_TRUE);
            } else {
                int context_attribs[] = {
                        GLX_CONTEXT_MAJOR_VERSION_ARB, settings.versionMajor,
                        GLX_CONTEXT_MINOR_VERSION_ARB, settings.versionMinor,
                        None
                };
                ctx = glXCreateContextAttribsARB(m_display, *bestFbc, shared, True, context_attribs);
                // Sync to ensure any errors generated are processed.
                XSync(m_display, False );
                if ( !ctxErrorOccurred && ctx ) {
                    // Off-screen rendering target: a PBuffer of the requested
                    // size (static_cast avoids unsigned->int narrowing in the
                    // braced initializer).
                    int pbuffer_attribs[] = {
                            GLX_PBUFFER_WIDTH, static_cast<int>(width),
                            GLX_PBUFFER_HEIGHT, static_cast<int>(height),
                            None
                    };
                    pbuf = glXCreatePbuffer(m_display, *bestFbc, pbuffer_attribs);
                    XSync(m_display, False);
                } else
                {
                  // Couldn't create a versioned context: retry. Implementations
                  // return the newest compatible context when a pre-3.0 version
                  // is requested.
                  // NOTE(review): the attribs are re-assigned the same values
                  // here, so this retry is identical to the first attempt —
                  // presumably 1/0 were intended; confirm.
                  context_attribs[1] = settings.versionMajor;
                  context_attribs[3] = settings.versionMinor;

                  ctxErrorOccurred = false;
                  std::cout<<"Failed to create GL 3.0 context"
                          " ... using old-style GLX context"<<std::endl;
                  ctx = glXCreateContext(m_display, vi, shared, GL_TRUE);
                }
            }
            // Sync and restore the original error handler on EVERY path: the
            // old-style branch used to leave ctxErrorHandler installed.
            XSync(m_display, False );
            XSetErrorHandler( oldHandler );
            // Free the FBConfig list (previously leaked). bestFbc points into
            // it, so this must happen after context/pbuffer creation.
            if (configs)
                XFree(configs);
            // NOTE(review): `vi` is still leaked; it cannot be XFree'd blindly
            // because getBestVisual()'s failure path returns `new`-allocated
            // memory — fix the ownership there first.
            if ( ctxErrorOccurred || !ctx )
            {
                std::cerr<<"Failed to create an OpenGL context"<<std::endl;
                exit(1);
            }
            // Report whether we obtained a direct rendering context.
            if ( ! glXIsDirect (m_display, ctx ) )
            {
                std::cout<<"Indirect GLX rendering context obtained"<<std::endl;
            }
            else
            {
                std::cout<<"Direct GLX rendering context obtained"<<std::endl;
            }
        }
        // Creates a GLX context attached to an existing X window, sharing GL
        // resources with sharedContext when one is given.
        void GlxContext::create(::Window win, IContext* sharedContext) {
            GLXContext shared = (sharedContext != nullptr) ? static_cast<GlxContext*>(sharedContext)->ctx : nullptr;
            m_window = win;
            m_windowLess = false;
            // Retrieve the attributes of the target window.
            XWindowAttributes windowAttributes;
            if (XGetWindowAttributes(m_display, m_window, &windowAttributes) == 0)
            {
                std::cerr << "Failed to get the window attributes" << std::endl;
                return;
            }

            // Get the XVisualInfo describing the window's visual.
            XVisualInfo tpl;
            tpl.screen   = DefaultScreen(m_display);
            tpl.visualid = XVisualIDFromVisual(windowAttributes.visual);
            int nbVisuals = 0;
            XVisualInfo* vi = XGetVisualInfo(m_display, VisualIDMask | VisualScreenMask, &tpl, &nbVisuals);
            // `vi` used to be dereferenced unchecked; XGetVisualInfo can fail.
            if (!vi || nbVisuals == 0)
            {
                std::cerr << "Failed to get the visual info of the window" << std::endl;
                return;
            }
            int depthBits=0, stencilBits=0, antiAliasingLevel=0;
            glXGetConfig(m_display,vi,GLX_DEPTH_SIZE, &depthBits);
            glXGetConfig(m_display,vi,GLX_STENCIL_SIZE, &stencilBits);
            glXGetConfig(m_display,vi,GLX_SAMPLES, &antiAliasingLevel);
            // Report back what the window's visual actually provides.
            settings.depthBits = depthBits;
            settings.stencilBits = stencilBits;
            settings.antiAliasingLevel = antiAliasingLevel;
            // Find the GLXFBConfig matching the window's visual. No attribute
            // filter: we only match on the visual id.
            GLXFBConfig* bestFbc = nullptr;
            int nbConfigs = 0;
            GLXFBConfig* configs = glXChooseFBConfig(m_display, DefaultScreen(m_display), NULL, &nbConfigs);
            int bestVersionMajor = 0, bestVersionMinor = 0;
            for (int i = 0; configs && (i < nbConfigs); ++i)
            {
                XVisualInfo* visual = glXGetVisualFromFBConfig(m_display, configs[i]);

                if (!visual)
                    continue;

                if (visual->visualid == vi->visualid)
                {
                    bestFbc = &configs[i];
                    XFree(visual);
                    break;
                }
                // NOTE(review): GLX_CONTEXT_*_VERSION_ARB are not glXGetConfig
                // attributes, and `vi` (not `visual`) is queried here, so these
                // calls cannot report per-config versions — confirm the intent.
                int versionMajor = 0, versionMinor = 0;
                glXGetConfig(m_display,vi,GLX_CONTEXT_MAJOR_VERSION_ARB, &versionMajor);
                glXGetConfig(m_display,vi,GLX_CONTEXT_MINOR_VERSION_ARB, &versionMinor);
                if (versionMajor > bestVersionMajor) {
                    bestVersionMajor = versionMajor;
                    settings.versionMajor = versionMajor;
                }
                if (versionMinor > bestVersionMinor) {
                    bestVersionMinor = versionMinor;
                    settings.versionMinor = versionMinor;
                }
                XFree(visual);
            }
            // Get the default screen's GLX extension list.
            const char *glxExts = glXQueryExtensionsString(m_display,
                                                          DefaultScreen(m_display ) );
            // It is not necessary to create or make current to a context before
            // calling glXGetProcAddressARB.
            glXCreateContextAttribsARBProc glXCreateContextAttribsARB = (glXCreateContextAttribsARBProc)
                glXGetProcAddressARB( (const GLubyte *) "glXCreateContextAttribsARB" );
            // Install an X error handler so the application won't exit if GL 3.0
            // context allocation fails. The handler is process-global: guard
            // against other threads issuing X commands while this runs.
            int (*oldHandler)(::Display*, XErrorEvent*) =
              XSetErrorHandler(&ctxErrorHandler);
            // Use the old-style creation method when the ARB entry points are
            // missing. Also taken when no FBConfig matched the visual —
            // `*bestFbc` used to be dereferenced while nullptr on both paths.
            if ( !isExtensionSupported( glxExts, "GLX_ARB_create_context" ) ||
               !glXCreateContextAttribsARB || !bestFbc )
            {
                 std::cout<<"glXCreateContextAttribsARB() not found"
                       " ... using old-style GLX context"<<std::endl;
                 if (bestFbc)
                     ctx = glXCreateNewContext(m_display, *bestFbc, GLX_RGBA_TYPE, shared, True );
                 else
                     // No matching FBConfig: fall back on the visual-based API.
                     ctx = glXCreateContext(m_display, vi, shared, GL_TRUE);
            }
            // If it does, try to get a GL 3.0 context!
            else
            {
                int context_attribs[] =
                {
                    GLX_CONTEXT_MAJOR_VERSION_ARB, settings.versionMajor,
                    GLX_CONTEXT_MINOR_VERSION_ARB, settings.versionMinor,
                    //GLX_CONTEXT_FLAGS_ARB        , GLX_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB,
                    None
                };
                ctx = glXCreateContextAttribsARB(m_display, *bestFbc, shared,
                                                  True, context_attribs );

                // Sync to ensure any errors generated are processed.
                XSync(m_display, False );
                if ( !ctxErrorOccurred && ctx ) {
                    std::cout<<"Created GL 3.0 context"<<std::endl;
                } else
                {
                  // Couldn't create a versioned context: retry.
                  // NOTE(review): the attribs are re-assigned the same values
                  // here, so this retry is identical to the first attempt —
                  // presumably 1/0 were intended; confirm.
                  context_attribs[1] = settings.versionMajor;
                  context_attribs[3] = settings.versionMinor;

                  ctxErrorOccurred = false;

                  std::cout<<"Failed to create GL 3.0 context"
                          " ... using old-style GLX context"<<std::endl;
                  ctx = glXCreateContextAttribsARB(m_display, *bestFbc, shared,
                                                    True, context_attribs );
                }
            }
            // Sync and restore the original error handler on EVERY path: the
            // old-style branch used to leave ctxErrorHandler installed.
            XSync(m_display, False );
            XSetErrorHandler( oldHandler );
            // Free the Xlib allocations (previously leaked). bestFbc points
            // into `configs`, so this must happen after context creation.
            if (configs)
                XFree(configs);
            XFree(vi);
            if ( ctxErrorOccurred || !ctx )
            {
                std::cerr<<"Failed to create an OpenGL context"<<std::endl;
                exit(1);
            }
            // Report whether we obtained a direct rendering context.
            if ( ! glXIsDirect (m_display, ctx ) )
            {
                std::cout<<"Indirect GLX rendering context obtained"<<std::endl;
            }
            else
            {
                std::cout<<"Direct GLX rendering context obtained"<<std::endl;
            }
        }
        // Makes this context current on the calling thread (or releases it).
        // Window-less contexts bind their PBuffer; window-bound contexts (and
        // window-less ones without a PBuffer) bind their X drawable.
        bool GlxContext::setActive(bool current) {
            if (!ctx)
                return false;

            // Releasing: detach whatever is current from this thread.
            if (!current)
                return glXMakeCurrent(m_display, None, NULL);

            // Off-screen context with a PBuffer: bind the PBuffer first.
            if (m_windowLess && pbuf) {
                if (glXMakeContextCurrent(m_display, pbuf, pbuf, ctx))
                    return true;
                /* Some drivers do not support a context without a default
                 * framebuffer, so fall back on the default (root) window. */
                if (glXMakeContextCurrent(m_display, DefaultRootWindow(m_display), DefaultRootWindow(m_display), ctx))
                    return true;
                std::cerr<<"failed to make current"<<std::endl;
                exit(1);
            }

            // Window-bound (or PBuffer-less) context: bind the drawable.
            return glXMakeCurrent(m_display, m_window, ctx );
        }
        // Settings actually granted at context creation (updated by create()).
        const ContextSettings& GlxContext::getSettings() const {
            return settings;
        }
        // Swaps the front/back buffers of the context's drawable.
        void GlxContext::display() {
            glXSwapBuffers(m_display, m_window);
        }
        // Not implemented yet (would use GLX_EXT_swap_control or similar).
        void GlxContext::setVerticalSyncEnabled(bool enable) {
            //For later.
        }
        // Destroys the GLX resources, then closes this context's X connection.
        // NOTE(review): if this context is still current on some thread,
        // glXDestroyContext only defers destruction — consider making it
        // non-current first.
        GlxContext::~GlxContext() {
            if (pbuf)
            {
                glXDestroyPbuffer(m_display, pbuf);
            }
            if (ctx) {
                glXDestroyContext(m_display, ctx );
            }
            Display::closeDisplay(m_display);
        }
    }
}

Ok so I create the context in the window :


//////////////////////////////////////////////////////////// 
// Recreates the platform window, then attaches a fresh GL context to it.
void WindowImpl::create(VideoMode mode, const String& title, Uint32 style, const ContextSettings& settings)
        {
            // Recreate the WindowImpl implementation
            WindowImplType::create(mode, title, style, settings);
            // The context is created on the native handle so it shares
            // resources with the hidden global context.
            m_context.create(getSystemHandle());
        }

And another context in the FBO, otherwise it doesn’t draw anything.


////////////////////////////////////////////////////////////
            bool RenderTextureImplFBO::create(unsigned int width, unsigned int height, window::ContextSettings settings, unsigned int textureId)
            {
                m_context.create(settings, width, height);
                // Create the framebuffer object
                GLuint frameBuffer = 0;
                glCheck(glGenFramebuffersEXT(1, &frameBuffer));
                m_frameBuffer = static_cast<unsigned int>(frameBuffer);
                if (!m_frameBuffer)
                {
                    std::cerr << "Impossible to create render texture (failed to create the frame buffer object)" << std::endl;
                    return false;
                }
                glCheck(glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, m_frameBuffer));
                // Create the depth buffer if requested
                if (settings.depthBits > 0)
                {
                    GLuint depth = 0;
                    glCheck(glGenRenderbuffersEXT(1, &depth));
                    m_depthBuffer = static_cast<unsigned int>(depth);
                    if (!m_depthBuffer)
                    {
                        std::cerr << "Impossible to create render texture (failed to create the attached depth buffer)" << std::endl;
                        return false;
                    }
                    glCheck(glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, m_depthBuffer));
                    glCheck(glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT, width, height));
                    glCheck(glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, m_depthBuffer));
                }
                // Link the texture to the frame buffer
                glCheck(glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, textureId, 0));
                // A final check, just to be sure...
                if (glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT) != GL_FRAMEBUFFER_COMPLETE_EXT)
                {
                    glCheck(glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0));
                    std::cerr << "Impossible to create render texture (failed to link the target texture to the frame buffer)" << std::endl;
                    return false;
                }
                return true;
            }

Ok it works, textureId is shared between the FBO and the window context. But this is not the case of my vertex buffer object.

I create the vertex buffer object here :


               if(GLEW_ARB_vertex_buffer_object) {
                    // Lazily (re)upload the vertex data into the VBO.
                    if (m_needToUpdateVBO) {
                        // First use: allocate a buffer object name.
                        if (m_vboVertexBuffer == 0) {
                            GLuint vbo;
                            glCheck(glGenBuffers(1, &vbo));
                            m_vboVertexBuffer = static_cast<unsigned int>(vbo);
                        }
                        if (m_oldVerticesSize != m_vertices.size()) {
                            // Size changed: reallocate the buffer store.
                            // NOTE(review): GL_ARRAY_BUFFER_ARB mixes the ARB
                            // suffix with the core GL_ARRAY_BUFFER used in the
                            // bind calls; the values are identical but one
                            // spelling should be picked consistently.
                            glCheck(glBindBuffer(GL_ARRAY_BUFFER, m_vboVertexBuffer));
                            glCheck(glBufferData(GL_ARRAY_BUFFER_ARB, m_vertices.size() * sizeof(Vertex), &m_vertices[0], GL_DYNAMIC_DRAW));
                            glCheck(glBindBuffer(GL_ARRAY_BUFFER, 0));
                        } else {
                            // Same size: update the contents in place through a
                            // mapped pointer.
                            GLvoid *pos_vbo = nullptr;
                            glCheck(glBindBuffer(GL_ARRAY_BUFFER, m_vboVertexBuffer));
                            pos_vbo = glMapBuffer(GL_ARRAY_BUFFER, GL_WRITE_ONLY);
                            if (pos_vbo != nullptr) {
                                memcpy(pos_vbo,&m_vertices[0],  m_vertices.size() * sizeof(Vertex));
                                glCheck(glUnmapBuffer(GL_ARRAY_BUFFER));
                                pos_vbo = nullptr;
                            }
                            glCheck(glBindBuffer(GL_ARRAY_BUFFER, 0));
                        }
                    }
                    m_oldVerticesSize = m_vertices.size();
                    // The VBO is bound around the draw call; while it is bound,
                    // the gl*Pointer calls made inside target.draw() must be
                    // given byte offsets, not client-memory pointers — this is
                    // exactly the bug resolved later in this thread.
                    glCheck(glBindBuffer(GL_ARRAY_BUFFER, m_vboVertexBuffer));
                    target.draw(&m_vertices[0], m_vertices.size(), m_primitiveType, states);
                    glCheck(glBindBuffer(GL_ARRAY_BUFFER, 0));
                }

And I bind it before drawing stuff here :


 // Setup the pointers to the vertices' components
               if (vertices) {
                    // NOTE(review): these are client-memory pointers. If a VBO
                    // is bound to GL_ARRAY_BUFFER at this point, the last
                    // argument of each gl*Pointer call must instead be a byte
                    // offset into the buffer (0, 12, 16) — passing data+offset
                    // while a buffer is bound is precisely why nothing was
                    // drawn (see replies #5/#6 of this thread).
                    const char* data = reinterpret_cast<const char*>(vertices);
                    glCheck(glVertexPointer(3, GL_FLOAT, sizeof(Vertex), data + 0));
                    glCheck(glColorPointer(4, GL_UNSIGNED_BYTE, sizeof(Vertex), data + 12));
                    glCheck(glTexCoordPointer(2, GL_FLOAT, sizeof(Vertex), data + 16));
                }
                // Find the OpenGL primitive type
                static const GLenum modes[] = {GL_POINTS, GL_LINES, GL_LINE_STRIP, GL_TRIANGLES,
                                                   GL_TRIANGLE_STRIP, GL_TRIANGLE_FAN, GL_QUADS};
                GLenum mode = modes[type];
                // Draw the primitives
                glCheck(glDrawArrays(mode, 0, vertexCount));
                // Unbind the shader, if any

The problem is : when I draw to the window frame buffer it works, but, when I draw into an FBO it doesn’t display anything, it’s like if the VBO is not shared between the window context and the FBO context. Why ?

#2

I think I’ve found the problem: it’s because the VBOs are not shared between contexts. But I don’t understand why, because textures are also shared between different contexts and it works for them. (And for shaders too.)
It seems it’s not the same thing for VBOs.

#3

Or maybe I cannot create VBOs with an offscreen context like textures and shaders?

#4

If two contexts share textures then they also share buffer objects.

Note that container objects (e.g. VAOs and FBOs) are never shared between contexts.

#5

Ok I’ve solved this. We must unbind the VBO when calling the glXXXPointer functions, otherwise it doesn’t draw anything.

#6

I passed the pointers to the vertices (data + 0, data + 12 and data + 16) instead of passing the byte offsets (0, 12 and 16), which is why it was not working.