Unchanged shader suddenly refuses to compile

Hi all,

I have a weird shader compilation issue that seems to come and go. The actual error message is this:

ERROR: 0:1: '' : version '150' is not supported
ERROR: 0:1: '' : syntax error #version

This is the shader:

[NOTE] std::string geometryVertexProgram =
"#version 150 core
"
"
"
"in vec4 position;
"
"in vec3 normal;
"
"uniform mat4 mvp;
"
"uniform vec4 plane1;
"
"uniform vec4 plane2;
"
"uniform vec4 plane3;
"
"uniform vec4 plane4;
"
"uniform vec4 plane5;
"
"uniform vec4 plane6;
"
"out vec4 wsPosition;
"
"out vec3 wsNormal;
"
"
"
"void main(void)
"
"{
"
" wsPosition = position;
"
" wsNormal = normal;
"
" gl_ClipDistance[0] = dot(plane1, wsPosition);
"
" gl_ClipDistance[1] = dot(plane2, wsPosition);
"
" gl_ClipDistance[2] = dot(plane3, wsPosition);
"
" gl_ClipDistance[3] = dot(plane4, wsPosition);
"
" gl_ClipDistance[4] = dot(plane5, wsPosition);
"
" gl_ClipDistance[5] = dot(plane6, wsPosition);
"
" gl_Position = mvp * position;
"
"}
";

        // GLSL 1.50 (core) fragment shader: three-light Lambert + Phong shading
        // with gamma decode/encode via pow(..., exp).  (Reconstructed: the forum
        // scrape turned the "\n" escapes into real newlines, which made the
        // string literal invalid C++.)
        // NOTE(review): `vec3 exp` shadows the GLSL built-in exp(); if neither
        // WIN nor OSX is defined, `exp` is undeclared and the shader will not
        // compile — TODO confirm the build always defines one of them.
        std::string geometryFragmentProgram =
            "#version 150 core\n"
            "\n"
            "in vec4 wsPosition;\n"
            "in vec3 wsNormal;\n"
            "out vec4 color;\n"
            "uniform vec3 lightdir1;\n"
            "uniform vec3 lightdir2;\n"
            "uniform vec3 lightdir3;\n"
            "uniform vec3 camerapos;\n"
            "uniform vec4 diffuse;\n"
            "uniform vec4 ambient;\n"
            "uniform vec4 specular;\n"
            "uniform float shininess;\n"
            "uniform float transparency;\n"
            "\n"
            "void main(void)\n"
            "{\n"
            " vec3 n = normalize(wsNormal.xyz);\n"
            " vec3 l1 = normalize(-lightdir1);\n"
            " vec3 l2 = normalize(-lightdir2);\n"
            " vec3 l3 = normalize(-lightdir3);\n"
            " vec3 v = normalize(camerapos - wsPosition.xyz);\n"
            " vec3 r1 = normalize(-reflect(l1, n));\n"
            " vec3 r2 = normalize(-reflect(l2, n));\n"
            " vec3 r3 = normalize(-reflect(l3, n));\n"
            " //mat3 m = mat3(l1, l2, l3);\n"
            " float dotNL1 = dot(n, l1);\n"
            " float dotNL2 = dot(n, l2);\n"
            " float dotNL3 = dot(n, l3);\n"
            " float lamb1 = 0.9 * clamp(dotNL1, 0.0, 1.0);\n"
            " float lamb2 = 0.7 * clamp(dotNL2, 0.0, 1.0);\n"
            " float lamb3 = 0.5 * clamp(dotNL3, 0.0, 1.0);\n"
            " float phong1 = 0.9 * pow(max(dot(r1,v), 0.0), shininess);\n"
            " float phong3 = 0.7 * pow(max(dot(r3,v), 0.0), shininess);\n"
            " vec3 c;\n"
#ifdef WIN
            " vec3 exp = vec3(2.2, 2.2, 2.2);\n"
#elif defined OSX
            " vec3 exp = vec3(2.0, 2.0, 2.0);\n"
#endif
            " c = (lamb1 + lamb2 + lamb3) * pow(diffuse.rgb, exp);\n"
            " c += 0.2 * pow(ambient.rgb, exp);\n"
            " c += (phong1 + phong3) * pow(specular.rgb, exp);\n"
            " color = vec4(pow(c, vec3(1.0, 1.0, 1.0) / exp), transparency);\n"
            "}\n";
[/NOTE]

The weird part is that other shaders that use the same #version directive compile just fine. In fact some are nearly identical to this shader. The shader itself has been in use for a while and has previously compiled on the machine in question.

I’m compiling the shader like this:

// Creates the GL program object up front; shader objects are attached later
// in CompileShader().  Requires a current OpenGL context on this thread.
[NOTE]OpenGLShader::OpenGLShader() : Shader(),
    _vertexProgramCompiled(false),
    _fragmentProgramCompiled(false)
{
    _program = glCreateProgram();
    
    // NOTE(review): glGetError() has a side effect (it clears the error flag);
    // in NDEBUG builds this assert is compiled out, so the error is never
    // fetched — TODO confirm that is acceptable.
    assert(glGetError() == 0);
}

// Releases the GL program object.  The individual shader objects are already
// deleted in CompileShader(), so only the program remains to free here.
// NOTE(review): assumes the owning GL context is still current on this
// thread at destruction time — TODO confirm.
OpenGLShader::~OpenGLShader()
{
    VERIFY(glDeleteProgram(_program));
}

// Compiles `src` as a GL_VERTEX_SHADER.
// On success stores the shader object in _vertexProgram, sets
// _vertexProgramCompiled and returns true; on failure prints the shader info
// log, deletes the shader object and returns false.
// NOTE(review): an already-compiled _vertexProgram is not deleted before the
// new glCreateShader call, so calling this twice without CompileShader() in
// between leaks a shader object — TODO confirm intended usage.
bool OpenGLShader::SetVertexProgram(const std::string& src)
{
    _vertexProgramCompiled = false;
    
    const GLchar *source = (const GLchar *) src.c_str();
    
    _vertexProgram = glCreateShader(GL_VERTEX_SHADER);
    // NOTE(review): compiled out under NDEBUG, leaving any GL error unfetched.
    assert(glGetError() == 0);
    
    // One source string; NULL length array means the string is NUL-terminated.
    VERIFY(glShaderSource(_vertexProgram, 1, &source, nullptr));
    VERIFY(glCompileShader(_vertexProgram));
    GLint isCompiled = 0;
    VERIFY(glGetShaderiv(_vertexProgram, GL_COMPILE_STATUS, &isCompiled));
    if (isCompiled == GL_FALSE)
    {
        PrintShaderLog(_vertexProgram);
        VERIFY(glDeleteShader(_vertexProgram));
        return false;
    }

    _vertexProgramCompiled = true;
    return true;
}

// Compiles `src` as a GL_FRAGMENT_SHADER.  Mirrors SetVertexProgram():
// on success stores the shader object in _fragmentProgram, sets
// _fragmentProgramCompiled and returns true; on failure prints the info log,
// deletes the shader object and returns false.
// NOTE(review): as with the vertex path, a previously compiled
// _fragmentProgram is not deleted first — repeated calls leak a shader object.
bool OpenGLShader::SetFragmentProgram(const std::string& src)
{
    _fragmentProgramCompiled = false;
    
    const GLchar *source = (const GLchar *)src.c_str();
    
    _fragmentProgram = glCreateShader(GL_FRAGMENT_SHADER);
    // NOTE(review): compiled out under NDEBUG, leaving any GL error unfetched.
    assert(glGetError() == 0);
            
    // One source string; NULL length array means the string is NUL-terminated.
    VERIFY(glShaderSource(_fragmentProgram, 1, &source, nullptr));
    VERIFY(glCompileShader(_fragmentProgram));
    GLint isCompiled = 0;
    VERIFY(glGetShaderiv(_fragmentProgram, GL_COMPILE_STATUS, &isCompiled));
    if (isCompiled == GL_FALSE)
    {
        PrintShaderLog(_fragmentProgram);
        VERIFY(glDeleteShader(_fragmentProgram));
        return false;
    }

    _fragmentProgramCompiled = true;
    return true;
}

bool OpenGLShader::CompileShader()
{
    if (false == _vertexProgramCompiled || false == _fragmentProgramCompiled)
    {
        return false;
    }
    
    VERIFY(glAttachShader(_program, _vertexProgram));
    VERIFY(glAttachShader(_program, _fragmentProgram));
    
    VERIFY(glBindAttribLocation(_program, 0, "position"));
    VERIFY(glBindAttribLocation(_program, 1, "normal"));
    
    VERIFY(glLinkProgram(_program));

    GLint isLinked = 0;
    VERIFY(glGetProgramiv(_program, GL_LINK_STATUS, &isLinked));
    if (isLinked == GL_FALSE)
    {
        PrintProgramLog(_program);
    }

    VERIFY(glDetachShader(_program, _vertexProgram));
    VERIFY(glDeleteShader(_vertexProgram));
    
    VERIFY(glDetachShader(_program, _fragmentProgram));
    VERIFY(glDeleteShader(_fragmentProgram));
    
    return isLinked;
}

[/NOTE]

The OpenGL context is created like this:

[NOTE] NSOpenGLPixelFormatAttribute pixelFormatAttributes[] =
{
NSOpenGLPFAColorSize, 32,
NSOpenGLPFADepthSize, 24,
NSOpenGLPFAStencilSize, 8,
NSOpenGLPFAAccelerated,
NSOpenGLPFAOpenGLProfile, NSOpenGLProfileVersion3_2Core,
NSOpenGLPFAMultisample,
NSOpenGLPFASampleAlpha,
NSOpenGLPFASampleBuffers, 1,
NSOpenGLPFASamples, 8,
0
};
NSOpenGLPixelFormat *pixelFormat = [[NSOpenGLPixelFormat alloc]
initWithAttributes: pixelFormatAttributes];
NSOpenGLContext *openGLContext = [[NSOpenGLContext alloc]
initWithFormat: pixelFormat shareContext:nil];

if (!(self = [super initWithCoder: aDecoder]))
{
    return nil;
}

[self setPixelFormat: pixelFormat];
[self setOpenGLContext: openGLContext];
[openGLContext makeCurrentContext];

[/NOTE]

Info about the computer:

OS X 10.9.5
HW Vendor: Intel Inc.
Renderer: Intel HD Graphics 4000 OpenGL Engine
OpenGL 4.1 INTEL-8.28.32
GLSL 4.10

I’m stuck on this issue so any help will be greatly appreciated!

Greets,

Floris

The only way you will get this error is with a legacy 2.1 context. When the error occurs, verify glGetString(GL_VERSION). Also check CGLGetCurrentContext(); contexts are set per-thread, perhaps you call some OS functionality which is making another context current?

This info is pulled from OpenGL and inserted into the log, but it’s pulled a lot earlier.

Intel Inc.
Intel HD Graphics 4000 OpenGL Engine
OpenGL 4.1 INTEL-8.28.32
GLSL 4.10

I will explicitly try glGetString(GL_VERSION) just before the shader that fails. I will also try making our context current again before the shader is compiled.

Thanks for the ideas!

Greets,

Floris

Thx, making our context active again solved the problem. D’oh, such an easy fix, but it didn’t occur to me. It makes sense, because the GUI uses OpenGL for rendering too and there’s a file dialog displayed before loading.

Greets,

Floris

This topic was automatically closed 183 days after the last reply. New replies are no longer allowed.