Hey, I'm having a weird issue with drawing text in OpenGL using fonts loaded with the FreeType 2 library. Here is a screenshot of what I'm seeing.
example http://img203.imageshack.us/img203/3316/freetypeweird.png
Here are my code bits for loading and rendering my text.
class Font
{
    Font(const String& filename)
    {
        if (FT_New_Face(Font::ftLibrary, "arial.ttf", 0, &mFace)) {
            cout << "UH OH!" << endl;
        }
        FT_Set_Char_Size(mFace, 16 * 64, 16 * 64, 72, 72);
    }

    Glyph* GetGlyph(const unsigned char ch)
    {
        if (FT_Load_Char(mFace, ch, FT_LOAD_RENDER))
            cout << "OUCH" << endl;

        FT_Glyph glyph;
        if (FT_Get_Glyph(mFace->glyph, &glyph))
            cout << "OUCH" << endl;

        FT_BitmapGlyph bitmap_glyph = (FT_BitmapGlyph)glyph;

        Glyph* thisGlyph = new Glyph;
        thisGlyph->buffer = bitmap_glyph->bitmap.buffer;
        thisGlyph->width = bitmap_glyph->bitmap.width;
        thisGlyph->height = bitmap_glyph->bitmap.rows;
        return thisGlyph;
    }
};
The relevant glyph information (width, height, buffer) is stored in the following struct:

struct Glyph {
    GLubyte* buffer;
    Uint width;
    Uint height;
};
And finally, to render it, I have this class called RenderFont.
class RenderFont
{
    RenderFont(Font* font)
    {
        mTextureIds = new GLuint[128];

        mFirstDisplayListId = glGenLists(128);
        glGenTextures(128, mTextureIds);

        for (unsigned char i = 0; i < 128; i++)
        {
            MakeDisplayList(font, i);
        }
    }

    void MakeDisplayList(Font* font, unsigned char ch)
    {
        Glyph* glyph = font->GetGlyph(ch);

        glBindTexture(GL_TEXTURE_2D, mTextureIds[ch]);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

        glTexImage2D(GL_TEXTURE_2D,
                     0,
                     GL_RGBA,
                     glyph->width,
                     glyph->height,
                     0,
                     GL_ALPHA,
                     GL_UNSIGNED_BYTE,
                     glyph->buffer);

        glNewList(mFirstDisplayListId + ch, GL_COMPILE);
        glBindTexture(GL_TEXTURE_2D, mTextureIds[ch]);
        glBegin(GL_QUADS);
        glTexCoord2d(0, 1); glVertex2f(0, glyph->height);
        glTexCoord2d(0, 0); glVertex2f(0, 0);
        glTexCoord2d(1, 0); glVertex2f(glyph->width, 0);
        glTexCoord2d(1, 1); glVertex2f(glyph->width, glyph->height);
        glEnd();
        glTranslatef(16, 0, 0);
        glEndList();
    }

    void Draw(const String& text, Uint size, const TransformComponent* transform, const Color32* color)
    {
        glEnable(GL_TEXTURE_2D);
        glEnable(GL_BLEND);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

        glTranslatef(100, 250, 0.0f);

        glListBase(mFirstDisplayListId);
        glCallLists(text.length(), GL_UNSIGNED_BYTE, text.c_str());

        glDisable(GL_TEXTURE_2D);
        glDisable(GL_BLEND);

        glLoadIdentity();
    }

private:
    GLuint mFirstDisplayListId;
    GLuint* mTextureIds;
};
Can anybody see anything weird going on here that would cause the garbled text? It's strange because if I change the font size or the DPI, some letters that displayed correctly become garbled, and other letters that were garbled before display correctly.
I'm not familiar with FreeType, but from the picture it looks like the width of the characters is not directly related to the size of the buffers (i.e. glyph->buffer does not point to an array of glyph->width * glyph->height bytes).
As a guess, I'd say each character occupies a single row width in memory (as opposed to the width it uses on screen), probably the pitch of the bitmap, but you read each one back with its per-character width instead of that stride. So only the glyphs whose width matches the stride come out correct.
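If that's the case, a minimal sketch of the fix looks like this, copying each glyph row by row. It assumes FreeType's FT_Bitmap layout with an 8-bit grayscale bitmap and a non-negative pitch; PackGlyph is a hypothetical helper name, not part of either library:

#include <cstring>
#include <vector>
#include <ft2build.h>
#include FT_FREETYPE_H

// Copy a FreeType bitmap into a tightly packed array. FreeType stores rows
// 'pitch' bytes apart, which may be wider than 'width', so reading
// width*height bytes straight out of 'buffer' shears the rows.
std::vector<unsigned char> PackGlyph(const FT_Bitmap& bitmap)
{
    std::vector<unsigned char> packed(bitmap.width * bitmap.rows);
    for (unsigned int row = 0; row < bitmap.rows; ++row)
        std::memcpy(&packed[row * bitmap.width],
                    bitmap.buffer + row * bitmap.pitch,
                    bitmap.width);
    return packed;
}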
Use:
glPixelStorei(GL_PACK_ALIGNMENT, 1);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
This annoyed the heck out of me too, but you need to tell OpenGL to use the spacing you give it, not the normal 32-bit boundaries it expects. The pitch of the images changes, but OpenGL doesn't know to use smaller packing alignments without these calls before your texture creation.
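Applied to the question's MakeDisplayList, that means one extra call right before the upload, something like this (a sketch; I've also matched the internal format to the one-byte-per-pixel GL_ALPHA data):

// Glyph rows are tightly packed, not padded to 4-byte boundaries.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA,
             glyph->width, glyph->height,
             0, GL_ALPHA, GL_UNSIGNED_BYTE, glyph->buffer);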
I do it like this:
// Convert the glyph to a bitmap.
FT_Glyph_To_Bitmap(&glyph, FT_RENDER_MODE_NORMAL, NULL, true);
FT_BitmapGlyph bitmap_glyph = (FT_BitmapGlyph)glyph;

// This reference will make accessing the bitmap easier.
FT_Bitmap& bitmap = bitmap_glyph->bitmap;

int _Left   = abs(bitmap_glyph->left);
int _Top    = abs(bitmap_glyph->top);
int _Height = abs(bitmap.rows);
int _Width  = _Left + abs(bitmap.width);

// If it's not a glyph or spacing, go to the next one.
// ('i' is the character code from the surrounding loop.)
if ((_Width == 0 || _Height == 0) && !isspace(i))
    return;

// advance.x is in 26.6 fixed point, hence the >> 6 to get pixels.
advances[i] = max(_Width, face->glyph->advance.x >> 6);

// Two bytes per texel: luminance, then alpha.
vector<unsigned char> Data(_Height * _Width * 2, 0);
for (int32 h = 0; h < abs(bitmap.rows); ++h)
    for (int32 w = 0; w < abs(bitmap.width); ++w)
    {
        int32 luminance = bitmap.buffer[h * bitmap.pitch + w];
        Data[(h * _Width + w + _Left) * 2 + 0] = 255;        // luminance
        Data[(h * _Width + w + _Left) * 2 + 1] = luminance;  // alpha
    }
I could probably move the 255 (white) into the String initialization function and just use the FreeType values for my alpha, but this way seems more descriptive, and speed isn't an issue in my usage.
The address &Data[0] now contains pixel data in GL_LUMINANCE_ALPHA external format, with type GL_UNSIGNED_BYTE and size _Width*_Height texels. This should make life easier for anyone doing this kind of thing.
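The matching upload would then look something like this (a sketch):

glPixelStorei(GL_UNPACK_ALIGNMENT, 1);  // Data is tightly packed
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, _Width, _Height,
             0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, &Data[0]);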
Are you sure that the FT_Glyph is actually a bitmap glyph? Make sure you use FT_Glyph_To_Bitmap first.
Alternatively, since you don't seem to need to store the FT_Glyphs around afterwards, you can just do:
int error = FT_Load_Char(face, ch, FT_LOAD_RENDER);
if (error)
    return error;

FT_GlyphSlot slot = face->glyph;
FT_Bitmap bitmap = slot->bitmap;

// do stuff with this FT_Bitmap
See here for the docs on FT_Bitmap. Just note that the next time you call FT_Load_Char, the data in bitmap will no longer be valid.
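So one option is to upload the texture right where that "do stuff" comment sits, before the next FT_Load_Char, and let OpenGL step over the row padding itself. A sketch (note that GL_UNPACK_ROW_LENGTH is counted in pixels, which matches pitch here only because this is a one-byte-per-pixel format, and that it assumes a non-negative pitch):

// Rows in the slot's bitmap are bitmap.pitch bytes apart.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glPixelStorei(GL_UNPACK_ROW_LENGTH, bitmap.pitch);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, bitmap.width, bitmap.rows,
             0, GL_ALPHA, GL_UNSIGNED_BYTE, bitmap.buffer);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);  // restore the default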
You also have a number of issues with memory management.
You use new Glyph to allocate your glyphs, but never call delete. Since you only need the glyph temporarily, to generate the texture and display list, you could hold it in a std::auto_ptr<Glyph>.
You never call FT_Done_Glyph, so all those FT_Glyphs you allocated are never released.