Load shader source using C static allocation only

Hello. This is a question about the C language more than OGL. I am a novice in both.

I use C11 and GL33C on GNU/Linux x64. The goal is to load shader source file contents and pass them to glShaderSource, etc. I did write a working program using calloc.

For educational purposes, I would like to write a different version, that only uses static allocation. I may be misunderstanding how the process works.

Here is the relevant function. It does not compile.

/*
 * Attempt #1 (does not compile): read a shader source file line by
 * line into a static 2-D char array and hand it to glShaderSource.
 *
 * Root cause of the compile error quoted below: `content` has type
 * GLchar[MAX_LINE_QUANTITY][BUF_SIZE]; as an argument it decays to
 * GLchar (*)[BUF_SIZE] (pointer to an array), but glShaderSource
 * expects const GLchar * const * (pointer to pointer).  A 2-D array
 * is one contiguous object, not an array of pointers, so the types
 * are genuinely incompatible.
 */
bool shader_source_file_load(const GLuint shader_name, const char* source_file_pathname)
{
	if (0 == shader_name) {
		return false;
	}

	bool success_flag = true;

	char sane_pathname[8192] = {0};
	/* NOTE(review): realpath() result unchecked — on failure
	 * sane_pathname stays empty and the fopen below fails with a
	 * confusing errno. */
	realpath(source_file_pathname, sane_pathname);

	FILE* f = fopen(sane_pathname, "r");
	if (NULL == f) {
		perror("shader source: could not open source file");
		/* BUG: jumps to `fail`, which calls clearerr(f) and fclose(f)
		 * while f is NULL — undefined behavior. */
		goto fail;
	}

	enum {
		BUF_SIZE = 8192,
		MAX_LINE_QUANTITY = 1024,
	};
	GLsizei count = 0;
	/* 1024 * 8192 = 8 MiB of automatic storage — likely to overflow a
	 * default-sized stack even before the type problem is fixed. */
	GLchar content[MAX_LINE_QUANTITY][BUF_SIZE] = {{0}, {0}};
	GLint length[MAX_LINE_QUANTITY] = {0};
	char buf_p[BUF_SIZE] = {0};

	/* BUG: this hands buf_p to stdio as the stream's internal buffer,
	 * yet fgets below also uses buf_p as its destination.  stdio owns
	 * a setvbuf-supplied buffer; aliasing it corrupts the stream data
	 * (the "weird truncation" mentioned later in the discussion). */
	setvbuf(f, buf_p, _IOLBF, BUF_SIZE);

	/* NOTE(review): `while (!feof(f))` is the classic anti-pattern;
	 * the fgets NULL check below is what actually ends the loop, so
	 * the feof/ferror tests here are redundant at best. */
	while (!feof(f)) {
		if (ferror(f)) {
			perror("shader source: could not read source file");
			goto fail;
		}
		char* t = fgets(buf_p, BUF_SIZE, f);
		if (NULL == t) {
			break;
		}
		int32_t line_len = 0;
		for (uint32_t k = 0; k < BUF_SIZE; ++k) {
			if ('\0' == buf_p[k]) {
				/* Line length excluding the null termination character. */
				/* BUG: off by one — when buf_p[k] is '\0', exactly k
				 * characters precede it, so this should be k, not k - 1. */
				line_len = k - 1;
				break;
			}
			content[count][k] = buf_p[k];
		}
		length[count] = line_len;
		++count;
		if (count >= MAX_LINE_QUANTITY) {
			fputs("shader source: too many lines\n", stderr);
			goto fail;
		}
	}

	fclose(f);

	if (success_flag) {
		/* Compile error here: see the note at the top of the function. */
		glShaderSource(shader_name, count, content, length);
		const GLenum e = glGetError();
		switch (e) {
			case GL_NO_ERROR: {
				/* NOTE(review): tautology — success_flag is already true. */
				success_flag = true || success_flag ;
				break;
			}
			case GL_INVALID_OPERATION: {
				fputs("shader source: require valid shader object\n", stderr);
			}
			/* BUG: missing break — falls through and also prints the
			 * GL_INVALID_VALUE message before reaching default. */
			case GL_INVALID_VALUE: {
				fputs("malformed shader source\n", stderr);
			}
			/* Falls through into default (goto fail) — failing is likely
			 * intended here, but the doubled message is not. */
			default: {
				goto fail;
			}
		}
	}

	return success_flag;

	fail: {
		/* BUG: reachable with f == NULL (fopen failure above); both
		 * calls below would then be undefined behavior. */
		clearerr(f);
		fclose(f);
		gl_error_print(stderr);
		success_flag = false;
		return success_flag;
	}
}

The compiler complains that the type is incompatible. How do I make it compatible? I understand that arrays and pointers are distinct concepts, and I also believed that arrays can be accessed via pointers. glShaderSource expects an array of char pointers. Isn’t that what I am trying to pass here? What’s the difference?

I understand that the fgets buffer is overwritten on each call, which is why I copy its contents out. Am I doing the copy correctly here?

src/gl/base.c: In function ‘shader_source_file_load’:
src/gl/base.c:130:52: error: passing argument 3 of ‘epoxy_glShaderSource’ from incompatible pointer type [-Werror=incompatible-pointer-types]
  130 |                 glShaderSource(shader_name, count, content, length);
      |                                                    ^~~~~~~
      |                                                    |
      |                                                    GLchar (*)[8192] {aka char (*)[8192]}
src/gl/base.c:130:52: note: expected ‘const GLchar * const*’ {aka ‘const char * const*’} but argument is of type ‘GLchar (*)[8192]’ {aka ‘char (*)[8192]’}
cc1: all warnings being treated as errors

You’re passing an array of arrays. Arrays aren’t pointers; they decay to a pointer to the first element when used as values, but that doesn’t help here.

You need to create and populate an array of pointers, something like:

char *lines[MAX_LINE_QUANTITY];
for (int i=0; i< count; i++)
    lines[i] = content[i];
glShaderSource(shader_name, count, lines, length);

This gives me another “incompatible type” compilation error.

src/gl/base.c: In function ‘shader_source_file_load’:
src/gl/base.c:134:52: error: passing argument 3 of ‘epoxy_glShaderSource’ from incompatible pointer type [-Werror=incompatible-pointer-types]
  134 |                 glShaderSource(shader_name, count, lines, length);
      |                                                    ^~~~~
      |                                                    |
      |                                                    GLchar ** {aka char **}
src/gl/base.c:134:52: note: expected ‘const GLchar * const*’ {aka ‘const char * const*’} but argument is of type ‘GLchar **’ {aka ‘char **’}
cc1: all warnings being treated as errors

If I declare lines with const I get segmentation fault at runtime. I copied your snippet directly.

Here is another snippet. It fails at runtime. I populate a char array that represents a line from the source file, and save the pointer to its first character. This is what glShaderSource eventually receives.

The problem with this particular snippet is the lifetime (storage duration) of `line`. When it goes out of scope, all the data is lost. Actually, that’s probably not the issue — `line` is static, so its data persists between calls. Now I suspect it is because I was overwriting, on each iteration, the same chunk of memory that every saved pointer points to.

Another version of the same function only saved some of the data, most of it was weirdly truncated.

/*
 * Attempt #2 (compiles, prints nonsense): keep an array of pointers
 * for glShaderSource, but copy every line into ONE static buffer.
 */
bool shader_source_file_load(const GLuint shader_name, const char* source_file_pathname)
{
	if (0 == shader_name) {
		return false;
	}

	bool success_flag = true;

	char sane_pathname[8192] = {0};
	/* NOTE(review): realpath() result unchecked. */
	realpath(source_file_pathname, sane_pathname);

	FILE* f = fopen(sane_pathname, "r");
	if (NULL == f) {
		perror("shader source: could not open source file");
		/* BUG: `fail` calls clearerr(f)/fclose(f) with f == NULL. */
		goto fail;
	}

	enum {
		BUF_SIZE = 8192,
		MAX_LINE_QUANTITY = 1024,
	};
	GLsizei count = 0;
	const GLchar* content[MAX_LINE_QUANTITY];
	GLint length[MAX_LINE_QUANTITY] = {0};
	char buf_p[BUF_SIZE] = {0};

	/* BUG: buf_p doubles as the stdio stream buffer (here) and the
	 * fgets destination (below); stdio owns a setvbuf-supplied buffer,
	 * so aliasing it corrupts reads. */
	setvbuf(f, buf_p, _IOLBF, BUF_SIZE);

	while (!feof(f)) {
		if (ferror(f)) {
			perror("shader source: could not read source file");
			goto fail;
		}
		char* t = fgets(buf_p, BUF_SIZE, f);
		if (NULL == t) {
			break;
		}
		/* BUG (the "prints nonsense" one): `static` means this is ONE
		 * buffer shared by every iteration.  Each pass overwrites the
		 * previous line, and every content[] entry below points at
		 * this same buffer, so after the loop all entries alias only
		 * the last line read. */
		static GLchar line[BUF_SIZE] = {0};
		uint32_t k = 0;
		while (k < BUF_SIZE) {
			char c = buf_p[k];
			line[k] = c;
			if ('\0' == c) {
				break;
			}
			++k;
		}
		/* Prints real shader source. */
		/* (Correct here: `line` still holds the line just copied.) */
		fputs(line, stdout);
		content[count] = &line[0];
		length[count] = k;
		++count;
		if (count >= MAX_LINE_QUANTITY) {
			fputs("shader source: too many lines\n", stderr);
			goto fail;
		}
	}

	/* Prints nonsense. */
	/* (Every pointer aliases the same static buffer — see above;
	 * each line of output repeats the final line read.) */
	for (uint32_t i = 0; i < count; ++i) {
		fputs(content[i], stdout);
	}

	fclose(f);

	if (success_flag) {
		glShaderSource(shader_name, count, content, length);
		const GLenum e = glGetError();
		switch (e) {
			case GL_NO_ERROR: {
				/* NOTE(review): tautology — success_flag is already true. */
				success_flag = true || success_flag ;
				break;
			}
			case GL_INVALID_OPERATION: {
				fputs("shader source: require valid shader object\n", stderr);
			}
			/* BUG: missing break — falls through to the next message
			 * and then into default. */
			case GL_INVALID_VALUE: {
				fputs("malformed shader source\n", stderr);
			}
			default: {
				goto fail;
			}
		}
		shader_source_print(shader_name);
	}

	return success_flag;

	fail: {
		/* BUG: reachable with f == NULL (fopen failure above). */
		clearerr(f);
		fclose(f);
		gl_error_print(stderr);
		success_flag = false;
		return success_flag;
	}
}

Just in case, here is the link to the actual working version that uses calloc. (Yes, I know about free and fread.) I don’t understand why it is so difficult to translate that snippet without using heap memory allocation. Should I just give up and do it that way?

I can’t include links yet so I copy the source for now.

/*
 * Reference version (works): heap-allocate a private copy of each
 * line so every content[] pointer refers to distinct storage.
 */
bool shader_source_file_load(const GLuint shader_name, const char* source_file_pathname)
{
	if (0 == shader_name) {
		return false;
	}

	bool success_flag = true;

	char sane_pathname[8192] = {0};
	/* NOTE(review): realpath() result unchecked. */
	realpath(source_file_pathname, sane_pathname);

	FILE* f = fopen(sane_pathname, "r");
	if (NULL == f) {
		perror("shader source: could not open source file");
		/* BUG: `fail` calls clearerr(f)/fclose(f) with f == NULL. */
		goto fail;
	}

	/* NOTE(review): function-local #defines leak into the rest of the
	 * translation unit (no #undef); a block-scoped enum is cleaner. */
	#define BUF_SIZE 65535
	#define MAX_LINE_QUANTITY 1024
	GLsizei count = 0;
	/* FIXME I don't understand the type that glShaderSource expects. */
	/* (It expects const GLchar * const * — an array of pointers to
	 * NUL-terminated strings; this declaration is compatible.) */
	const GLchar* content[MAX_LINE_QUANTITY];
	char buf_p[BUF_SIZE] = {0};

	/* BUG: buf_p is both the stdio stream buffer (here) and the fgets
	 * destination (below) — aliasing a setvbuf buffer corrupts the
	 * stream. */
	setvbuf(f, buf_p, _IOLBF, BUF_SIZE);

	while (!feof(f)) {
		if (ferror(f)) {
			perror("shader source: could not read source file");
			goto fail;
		}
		char* t = fgets(buf_p, BUF_SIZE, f);
		if (NULL == t) {
			break;
		}
		/* FIXME Remove dynamic allocation. */
		/* NOTE(review): calloc result unchecked (NULL on OOM), and the
		 * allocations are never freed — the acknowledged leak. */
		char* line = calloc(BUF_SIZE, sizeof(char));
		/* Copies all 65535 bytes, including stale bytes past the
		 * terminator left in buf_p by longer, earlier lines; harmless
		 * only because glShaderSource stops at the first '\0' when
		 * the length argument is NULL, but wasteful. */
		for (uint32_t k = 0; k < BUF_SIZE; ++k) {
			line[k] = buf_p[k];
		}
		content[count] = line;
		++count;
		if (count >= MAX_LINE_QUANTITY) {
			fputs("shader source: too many lines\n", stderr);
			goto fail;
		}
	}

	fclose(f);

	if (success_flag) {
		glShaderSource(shader_name, count, content, NULL);
		const GLenum e = glGetError();
		switch (e) {
			case GL_NO_ERROR: {
				/* NOTE(review): tautology — success_flag is already true. */
				success_flag = true || success_flag ;
				break;
			}
			case GL_INVALID_OPERATION: {
				fputs("shader source: require valid shader object\n", stderr);
			}
			/* BUG: missing break — falls through to the next message
			 * and then into default. */
			case GL_INVALID_VALUE: {
				fputs("malformed shader source\n", stderr);
			}
			default: {
				goto fail;
			}
		}
	}

	return success_flag;

	fail: {
		/* BUG: reachable with f == NULL (fopen failure above). */
		clearerr(f);
		fclose(f);
		gl_error_print(stderr);
		success_flag = false;
		return success_flag;
	}
}

I finally figured it out. Here is the working example.

bool shader_source_file_load(const GLuint shader_name, const char* source_file_pathname)
{
	if (0 == shader_name) {
		return false;
	}

	bool success_flag = true;

	char sane_pathname[8192] = {0};
	realpath(source_file_pathname, sane_pathname);

	FILE* f = fopen(sane_pathname, "r");
	if (NULL == f) {
		perror("shader source: could not open source file");
		goto fail;
	}

	enum {
		BUF_SIZE = 8192,
		MAX_LINE_QUANTITY = 1024,
	};
	GLsizei count = 0;
	const GLchar* content[MAX_LINE_QUANTITY];
	GLint length[MAX_LINE_QUANTITY] = {0};
	char buf_p[BUF_SIZE] = {0};

	setvbuf(f, buf_p, _IOLBF, BUF_SIZE);
	/* WARNING: `static` modifier is required. */
	static GLchar lines[MAX_LINE_QUANTITY][BUF_SIZE] = {{0}, {0}};

	while (!feof(f)) {
		if (ferror(f)) {
			perror("shader source: could not read source file");
			goto fail;
		}
		char* t = fgets(buf_p, BUF_SIZE, f);
		if (NULL == t) {
			break;
		}
		uint32_t k = 0;
		while (k < BUF_SIZE) {
			char c = buf_p[k];
			lines[count][k] = c;
			if ('\0' == c) {
				break;
			}
			++k;
		}
		content[count] = &lines[count][0];
		length[count] = k;
		++count;
		if (count >= MAX_LINE_QUANTITY) {
			fputs("shader source: too many lines\n", stderr);
			goto fail;
		}
	}

	fclose(f);

	if (success_flag) {
		glShaderSource(shader_name, count, content, length);
		const GLenum e = glGetError();
		switch (e) {
			case GL_NO_ERROR: {
				success_flag = true || success_flag ;
				break;
			}
			case GL_INVALID_OPERATION: {
				fputs("shader source: require valid shader object\n", stderr);
			}
			case GL_INVALID_VALUE: {
				fputs("malformed shader source\n", stderr);
			}
			default: {
				goto fail;
			}
		}
		shader_source_print(shader_name);
	}

	return success_flag;

	fail: {
		clearerr(f);
		fclose(f);
		gl_error_print(stderr);
		success_flag = false;
		return success_flag;
	}
}

I suspect the problem was with me over-writing memory where I stored lines. This is still probably an example of what not to do.

Thanks to @GClements .