Draws triangle in compatibility profile but not in core profile

So I just followed an example from the SuperBible and wrote this code. Unfortunately it works in the compatibility profile but not in core. Can anyone point out the reason? The code compiles successfully.

#include <GLFW/glfw3.h>
#include <iostream>
#include <GL/glx.h>
#include <stdio.h>
#include "procs.hpp"

// Vertex shader: the three triangle positions are hard-coded inside the
// shader and selected via gl_VertexID, so the draw call needs no vertex
// buffers or enabled attribute arrays.
// NOTE(review): "#version 450 core" requires an OpenGL 4.5-capable context;
// the context requested in main() must match this.
constexpr const GLchar * vertex_shader_source = 
R"(#version 450 core
    const vec2 vertices[3] = vec2[3](vec2(0.25, -0.25),
    vec2(-0.25, -0.25),
    vec2(0.25, 0.25));

    void main(){
        gl_Position = vec4(vertices[gl_VertexID].xy,0.5,1);
    }

)";

// Fragment shader: writes a constant opaque red to the first color output.
// NOTE(review): "#version 450 core" requires an OpenGL 4.5-capable context.
constexpr const GLchar * fragment_shader_source = 
R"(#version 450 core
    out vec4 out_color;
    void main(){
        out_color= vec4(1,0,0,1);
    }
)"; 

int main()
{
    GLFWwindow* window;
    GLuint vertex_shader;
    GLuint fragment_shader;
    GLuint program;
    GLuint vertex_array_object;

    /* Initialize the library */
    if (!glfwInit()){
	std::cerr<<"Error initializing glfw";	
        return -1;
    }

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_DEBUG_CONTEXT, true);
    //glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_COMPAT_PROFILE);  
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); 

    /* Create a windowed mode window and its OpenGL context */
    window = glfwCreateWindow(1000, 800, "Hello World", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    /* Make the window's context current */
    glfwMakeContextCurrent(window);
    printf("Opengl : %s\n", glGetString(GL_VERSION));
    vertex_shader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertex_shader, 1, &vertex_shader_source, nullptr);
    glCompileShader(vertex_shader);
    
    
    fragment_shader = glCreateShader(GL_FRAGMENT_SHADER) ;
    glShaderSource(fragment_shader,1,&fragment_shader_source,nullptr);
    glCompileShader(fragment_shader);

    program = glCreateProgram();
    glAttachShader(program,vertex_shader);
    glAttachShader(program,fragment_shader);
    glLinkProgram(program);

    // Delete the shaders as the program has them now
    glDeleteShader(vertex_shader);
    glDeleteShader(fragment_shader);

    glCreateVertexArrays(1, &vertex_array_object);
    glBindVertexArray(vertex_array_object);

    /* Loop until the user closes the window */
    while (!glfwWindowShouldClose(window))
    {	
	    /* Render here */
        static GLfloat color []= {1,0,1,1};
        glClearBufferfv(GL_COLOR,0,color);
        glUseProgram(program);
        glDrawArrays(GL_TRIANGLES, 0, 3);
       
        /* Swap front and back buffers */
        glfwSwapBuffers(window);

        /* Poll for and process events */
        glfwPollEvents();
    }

    glDeleteProgram(program);

    glfwTerminate();
    return 0;
}

I’m an OpenGL user, by far not a specialist.
You should think of shaders as ‘processors’ that data passes through.
You’ve put the data directly in them …
You should have a closer look at how to store data properly in buffers.
You probably would not have come here if you had debugged the shaders in the first place. You can query an info-log string to read about errors in the shaders — it’s empty if all is OK.
You could also have tested whether the program value differs from 0 (indicating a proper result).
I don’t have proper links, but Google will pop up plenty for, say, ‘debug shaders’.

AFAIK, rendering without any attribute arrays enabled (e.g. using gl_VertexID to perform dependent fetches) is valid.

The only thing which stands out is choosing a 3.3 context but using a 4.5 shader and glCreateVertexArrays (which is 4.5). That, and not checking the compilation/linking status or logs.

Always room to grow wiser. Thanks