Fonts rendering on OSX

Hi. I’m trying to render the Menlo.ttc font with the FTGL library (C++). The results don’t satisfy me. This is what I have now:

[ATTACH=CONFIG]1339[/ATTACH]

This is not good enough for me. My terminal (and the CLion IDE) renders this font really well (bolder and sharper). This is my code:


        std::string text = "Hello world";
        std::string font = "/System/Library/Fonts/Menlo.ttc";
        FTFont* fontObj = new FTTextureFont(font.c_str());
	fontObj->FaceSize(14);

	glColor3d(this->colorRed, this->colorGreen, this->colorBlue);
	fontObj->Render(text.c_str(), -1, FTPoint(20, 20));
	delete fontObj;

Can you point me towards good-quality font rendering? The name of a library or technique is enough — I’ll google it.

Thanks

For a start, have you checked whether FTGL has any options which would improve rendering?

The main thing is to ensure that the glyphs are being rendered pixel-for-pixel, without any texture filtering. This requires setting up your projection transformation correctly for the size of the viewport. If you resize the window and want the text size to change in proportion, you need to re-render the glyphs for a different font size; don’t try to scale the rasterised glyphs. Also, scalable fonts often use hand-tuned bitmaps for specific small sizes; using one of those will likely be cleaner than rasterising an outline font at a small size.

Other than that, personally I’d just use FreeType and OpenGL directly. Connecting the two really isn’t complex enough to need a library.

Thanks.

Now I’m using the FreeType library, but I only get a white rectangle. I’ve read a lot about texture rendering, but after several hours I still have no idea what I’m doing wrong.

My code:

void TextRenderer::render(std::string text)
	{

                this->font = "/System/Library/Fonts/Menlo.ttc";

                FT_Init_FreeType(&this->library);

		FT_New_Face(this->library, this->font.c_str(), 0, &(this->face));

		FT_Set_Char_Size(
				this->face,    /* handle to face object           */
				0,             /* char_width in 1/64th of points  */
				16*64,         /* char_height in 1/64th of points */
				300,           /* horizontal device resolution    */
				300 	       /* vertical device resolution      */
		);

		FT_Load_Char(this->face, 'A', FT_LOAD_DEFAULT);

		FT_Render_Glyph(face->glyph, FT_RENDER_MODE_NORMAL);

		glPushMatrix();
		glEnable(GL_TEXTURE_2D);

		GLuint texture;
		glGenTextures(1, &texture);

		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, texture);

		glTranslated(100, 100, 0);

		// FreeType
		FT_Int i, j, p, q;
		unsigned char image[this->face->glyph->bitmap.rows][this->face->glyph->bitmap.width];
		FT_Int x_max = this->positionLeft + this->face->glyph->bitmap.width;
		FT_Int y_max = this->positionTop + this->face->glyph->bitmap.rows;

		for(i=this->positionLeft, p=0; i<x_max; i++,p++)
		{
			for(j=this->positionTop, q=0; j<y_max; j++, q++)
			{
				if(i<0 || j<0 || i >= this->face->glyph->bitmap.width || j >= this->face->glyph->bitmap.rows)
				{
					continue;
				}
				std::cout<<i<<' '<<j<<std::endl;
				image[j][i] |= this->face->glyph->bitmap.buffer[q * this->face->glyph->bitmap.width + p];
				std::cout<<(int)image[j][i]<<std::endl;
			}
		}

		glTexImage2D(
			GL_TEXTURE_2D, 0, GL_RGB, this->face->glyph->bitmap.width, this->face->glyph->bitmap.rows, GL_RGB, 0, GL_UNSIGNED_BYTE, image
		);

		glBegin(GL_QUADS);
		glTexCoord2f(0.0, 0.0);
		glVertex2i(0, this->face->glyph->bitmap.rows);

		glTexCoord2f(1.0, 0.0);
		glVertex2i(this->face->glyph->bitmap.width, this->face->glyph->bitmap.rows);

		glTexCoord2f(1.0, 1.0);
		glVertex2i(this->face->glyph->bitmap.width, 0);

		glTexCoord2f(0.0, 1.0);
		glVertex2i(0, 0);
		glEnd();

		glDisable(GL_TEXTURE_2D);
		glPopMatrix();
}

What am I doing wrong?

The glBindTexture() call needs to be before the glTexParameter() calls. The filter modes are a property of a texture, and glTexParameteri() sets the modes for the texture bound to the active texture unit.

The minification filter for a newly-created texture is GL_NEAREST_MIPMAP_LINEAR, which requires mipmap levels; if they aren’t defined, the result is as if texturing is disabled. That would explain the white rectangle.

The texture only contains a single intensity value per pixel, so you should use GL_LUMINANCE rather than GL_RGB.

Also, because the row size isn’t guaranteed to be a multiple of four bytes, you need


        glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

Okay, I did what you told me, but the glyph is still not rendering — it’s still a white rectangle.

I cleared my code, so now you have full code:

#include <OpenGL/OpenGL.h>
#include <GLFW/glfw3.h>
#include <GLUT/glut.h>
#include <ft2build.h>
#include <freetype.h>
#include <iostream>
#include <string>

int main()
{
	GLFWwindow *glfwWindow = NULL;

	int width = 1280;
	int height = 800;

	std::string font = "/System/Library/Fonts/Menlo.ttc";

	FT_Library library;
	FT_Face face;

	if(!glfwInit())
	{
		return -2;
	}

	/* Create a windowed mode window and its OpenGL context */
	glfwWindow = glfwCreateWindow(width, height, "Sandbox", NULL, NULL);
	if(!glfwWindow)
	{
		glfwTerminate();
		return -1;
	}

	/* Make the window's context current */
	glfwMakeContextCurrent(glfwWindow);
	glMatrixMode(GL_PROJECTION);
	glOrtho(0, width, 0, height, -1, 1);

	glClearColor(0.22f, 0.23f, 0.24f, 1.0f);

	FT_Init_FreeType(&library);
	FT_New_Face(library, font.c_str(), 0, &face);

	FT_Set_Char_Size(
			face,    /* handle to face object           */
			0,       /* char_width in 1/64th of points  */
			16 * 64, /* char_height in 1/64th of points */
			300,     /* horizontal device resolution    */
			300      /* vertical device resolution      */
	);

	FT_Load_Char(face, 'A', FT_LOAD_DEFAULT);

	FT_Render_Glyph(face->glyph, FT_RENDER_MODE_NORMAL);

	/* Loop until the user closes the window */
	glMatrixMode(GL_MODELVIEW);
	while (!glfwWindowShouldClose(glfwWindow))
	{
		/* Render here */
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		glPushMatrix();
		glEnable(GL_TEXTURE_2D);

		GLuint texture;
		glActiveTexture(GL_TEXTURE0);
		glGenTextures(1, &texture);
		glBindTexture(GL_TEXTURE_2D, texture);

		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
		glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

		// FreeType
		glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

		glTexImage2D(
				GL_TEXTURE_2D, 0, GL_LUMINANCE, face->glyph->bitmap.width, face->glyph->bitmap.rows, GL_LUMINANCE, 0,
				GL_UNSIGNED_BYTE, face->glyph->bitmap.buffer
		);

		glBegin(GL_QUADS);
		glTexCoord2f(0.0, 0.0);
		glVertex2i(0, face->glyph->bitmap.rows);

		glTexCoord2f(1.0, 0.0);
		glVertex2i(face->glyph->bitmap.width, face->glyph->bitmap.rows);

		glTexCoord2f(1.0, 1.0);
		glVertex2i(face->glyph->bitmap.width, 0);

		glTexCoord2f(0.0, 1.0);
		glVertex2i(0, 0);
		glEnd();

		glPopMatrix();

		glfwSwapBuffers(glfwWindow);

		glfwPollEvents();
	}

	FT_Done_FreeType(library);

    return 0;
}

Thanks for your patience!


Edit: My glGetError() returns 1281.

Edit: Error is after line


		glTexImage2D(
				GL_TEXTURE_2D, 0, GL_LUMINANCE, face->glyph->bitmap.width, face->glyph->bitmap.rows, GL_LUMINANCE, 0,
				GL_UNSIGNED_BYTE, face->glyph->bitmap.buffer
		);


Problem fixed. My mistake was that I had swapped the positions of the border and format parameters in glTexImage2D. Thanks! :slight_smile: