Why isn't my window a perfect square?

In the following code, you’ll notice that WIDTH and HEIGHT are set to the same value, but when the window opens it is still a rectangle whose height is greater than its width. Any help is greatly appreciated.

#include <iostream>
#include <stdio.h>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <string>
#include <string.h> // for strlen
#include "util.h"
#include <thread>
#include <chrono>

// Setup: Linker->General, Linker->Input, add that .dll next to main

// Window dimensions
const GLint WIDTH = 1600, HEIGHT = 1600; // height was 1200

GLuint VAO, VBO, shader;

int numberOfVertices = 2; // number of vertices we want to draw
int index = 0;

// Vertex shader -> runs once per vertex, lets you manipulate it, then passes the result on to the fragment shader

// In the shaders you are writing GLSL
static const char* vShader = "\n\
#version 330\n\
layout (location = 0) in vec3 pos;\n\
\n\
void main()\n\
{\n\
	gl_Position = vec4(pos.x, pos.y, pos.z, 1.0);\n\
}";


// Fragment shader
static const char* fShader = "\n\
#version 330\n\
out vec4 colour;\n\
\n\
void main()\n\
{\n\
	colour = vec4(1.0, 1.0, 1.0, 1.0);\n\
}";

void CreateLines()
{
	// These are the points (x, y, z) that make up the two lines
	/*GLfloat vertices[] = {
		-0.5f, 0.0f, 0.0f,
		0.5f, 0.0f, 0.0f,

		-0.5f, 0.2f, 0.0f,
		0.5f, 0.2f, 0.0f,
	};*/

	GLfloat vertices[12];
	util::addToArray(vertices, index);


	glGenVertexArrays(1, &VAO); // Reserves space on the graphics card to be accessed by the variable VAO
	// Bind the vertex array so any OpenGL calls that touch vertex arrays or vertex buffers operate on the VAO we just created
	glBindVertexArray(VAO);

	glGenBuffers(1, &VBO);
	glBindBuffer(GL_ARRAY_BUFFER, VBO);
	// Static draw used when you aren't going to be changing the values in the actual array
	glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_DYNAMIC_DRAW); // Switching from static to dynamic draw didn't change anything

	glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0); // location of attribute, values at a time, type of those values, normalization, stride, offset
	glEnableVertexAttribArray(0);

	glBindBuffer(GL_ARRAY_BUFFER, 0); // Unbind
	glBindVertexArray(0); // Unbind
}

void AddShader(GLuint theProgram, const char* shaderCode, GLenum shaderType)
{
	GLuint theShader = glCreateShader(shaderType); // Creates an empty shader of that type and stores its ID in theShader

	const GLchar* theCode[1];
	theCode[0] = shaderCode;

	GLint codeLength[1];
	codeLength[0] = strlen(shaderCode);

	glShaderSource(theShader, 1, theCode, codeLength); // Sets the source code on the shader object
	glCompileShader(theShader);

	GLint  result = 0;
	GLchar eLog[1024] = { 0 };

	glGetShaderiv(theShader, GL_COMPILE_STATUS, &result); // Gets the result of compiling the shader
	if (!result)
	{
		glGetShaderInfoLog(theShader, sizeof(eLog), NULL, eLog);
		printf("Error compiling the %d shader: '%s'\n", shaderType, eLog);
		return;
	}
	glAttachShader(theProgram, theShader);
}

// Compiles the shaders and handles attaching them to the program
void CompileShaders()
{
	shader = glCreateProgram(); // Creates the program and gives shader the id so we can use shader to modify it

	if (!shader)
	{
		printf("Error creating shader\n");
		return;
	}

	AddShader(shader, vShader, GL_VERTEX_SHADER); // GL_VERTEX_SHADER is a built in enum -> type of shader -> needs to know what type of shader is being used
	AddShader(shader, fShader, GL_FRAGMENT_SHADER);

	GLint  result = 0;
	GLchar eLog[1024] = { 0 };

	glLinkProgram(shader); // Actually create the executables on the graphics card and make sure it's working
	glGetProgramiv(shader, GL_LINK_STATUS, &result); // Gets the result of linking the program
	if (!result)
	{
		glGetProgramInfoLog(shader, sizeof(eLog), NULL, eLog);
		printf("Error linking program: '%s'\n", eLog);
		return;
	}

	// Validate the program
	glValidateProgram(shader); // Makes sure the shader is valid in the current context that we're working in

	glGetProgramiv(shader, GL_VALIDATE_STATUS, &result); // Gets the result of validating the program
	if (!result)
	{
		glGetProgramInfoLog(shader, sizeof(eLog), NULL, eLog);
		printf("Error validating program: '%s'\n", eLog);
		return;
	}

}

int main()
{

	// Initialize GLFW
	if (!glfwInit())
	{
		printf("GLFW initialization failed!");
		glfwTerminate();
		std::cin.get();
		return 1;
	}

	// Setup GLFW window properties
	// OpenGL version
	glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
	glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
	// Core profile = no backwards compatibility
	glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
	// Allow forward compatibility
	glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);

	GLFWwindow* mainWindow = glfwCreateWindow(WIDTH, HEIGHT, "Playground", NULL, NULL);

	if (!mainWindow)
	{
		printf("GLFW window creation failed");
		glfwTerminate();
		std::cin.get();
		return 1;
	}

	// Get buffer size information
	// We want the dimensions of the area inside the window: the framebuffer, the part that holds all the OpenGL data as it is passed to the window
	int bufferWidth, bufferHeight;
	glfwGetFramebufferSize(mainWindow, &bufferWidth, &bufferHeight);

	// Set the context for GLEW to use
	// Make this context current so GLEW knows which OpenGL context everything should be tied to, so everything gets drawn to this window
	glfwMakeContextCurrent(mainWindow);
	
	// Allow modern extension features
	glewExperimental = GL_TRUE;

	if (glewInit() != GLEW_OK)
	{
		printf("GLEW initilisation failed!");
		glfwDestroyWindow(mainWindow);
		glfwTerminate();
		std::cin.get();
		return 1;
	}

	// Setup viewport size
	// There is a difference here between WIDTH and bufferWidth; should look into it
	glViewport(0, 0, bufferWidth, bufferHeight);

	// CreateLines();
	// CompileShaders();

	int i = 0;

	// Loop until window closed
	while (!glfwWindowShouldClose(mainWindow))
	{

		// Get and handle user input events
		glfwPollEvents(); // will check for any user events

		CreateLines();
		CompileShaders();
		index++;

		// Clear the window
		glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
		glClear(GL_COLOR_BUFFER_BIT);

		glUseProgram(shader); // Grabs the id then goes to the graphics card and says to use the one with the ID of shader
		// Everything below here will be using this shader
		glBindVertexArray(VAO);
		// Now using that VAO

		glDrawArrays(GL_LINES, 0, numberOfVertices); // Mode, where to start in the array, the number of vertices we want to draw

		glBindVertexArray(0);
		glUseProgram(0); // Unbinds the shader program

		glfwSwapBuffers(mainWindow); // We are drawing to a hidden buffer that constantly gets swapped around
	}
	std::cin.get();
	return 0;
}

Are your pixels square? If not, that’s probably why.

Grab a screenshot of your window and (in an image editor) measure the dimensions of the draw area (in pixels). Then grab a ruler and measure the physical real-world dimensions of this window in inches (or cm) on your display. Is the pixels/inch (or pixels/cm) the same in both width and height?
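If you'd rather not reach for a ruler, GLFW can report both the monitor's physical size and its current video mode, so you can compute the pixel density on each axis directly. A minimal sketch (note the physical size comes from the display's EDID and can be missing or approximate, so treat the numbers as a hint):

#include <cstdio>
#include <GLFW/glfw3.h>

int main()
{
	if (!glfwInit())
		return 1;

	GLFWmonitor* monitor = glfwGetPrimaryMonitor();

	int widthMM = 0, heightMM = 0;
	glfwGetMonitorPhysicalSize(monitor, &widthMM, &heightMM); // physical size in millimetres, from the display's EDID

	const GLFWvidmode* mode = glfwGetVideoMode(monitor); // current resolution of the monitor

	if (widthMM > 0 && heightMM > 0)
	{
		// If these two densities differ noticeably, the pixels are not square.
		printf("Horizontal: %.2f px/mm\n", (double)mode->width / widthMM);
		printf("Vertical:   %.2f px/mm\n", (double)mode->height / heightMM);
	}

	glfwTerminate();
	return 0;
}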

Also bear in mind that the window manager can completely override your request for a specific window size, and that window size != framebuffer size.

As mentioned, use glfwGetFramebufferSize() (among other methods) to determine the actual framebuffer resolution you need to render to the window you created.
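For example, on a HiDPI or scaled display the framebuffer can be larger than the window you asked for, and the window manager may hand you a different window size altogether, so it's worth logging both and keeping the viewport in sync on resize. A small sketch of what that might look like, assuming mainWindow has been created and made current as in the question:

// Window size is in screen coordinates; framebuffer size is in pixels.
int winW, winH, fbW, fbH;
glfwGetWindowSize(mainWindow, &winW, &winH);
glfwGetFramebufferSize(mainWindow, &fbW, &fbH);
printf("window: %dx%d, framebuffer: %dx%d\n", winW, winH, fbW, fbH);

// Keep the viewport matched to the framebuffer whenever the window is resized.
glfwSetFramebufferSizeCallback(mainWindow,
	[](GLFWwindow*, int width, int height) {
		glViewport(0, 0, width, height);
	});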

Just out of curiosity, what are the maximum resolutions of your screens?

I would have gone for the simplest reason why it looks rectangular. Maybe the display is only 1280 pixels tall (or even less)?
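That's easy to check programmatically before creating the window. A sketch, assuming GLFW is already initialised and WIDTH/HEIGHT as in the question:

// If the requested window is larger than the display, the window manager
// will typically clamp it, so a 1600x1600 request on a 1920x1080 screen
// cannot come back square.
const GLFWvidmode* mode = glfwGetVideoMode(glfwGetPrimaryMonitor());
if (WIDTH > mode->width || HEIGHT > mode->height)
	printf("Requested %dx%d exceeds the %dx%d display; expect clamping.\n",
		WIDTH, HEIGHT, mode->width, mode->height);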