Cannot get freetype to render glyph to texture


I was following this tutorial: http://en.wikibooks.org/wiki/OpenGL_Programming/Modern_OpenGL_Tutorial_Text_Rendering_01,

and trying to get text to render onto a quad on the screen. I followed all the steps, but I can't figure out why it isn't working.

Here is my code. I'm trying to render just one character, the letter 'H', but all I get is a green-colored quad :/.


int main()
{
    glfwInit();

    FT_Library ftLibrary;
    if(FT_Init_FreeType(&ftLibrary))
    {
        MessageBox(0, "wouldn't initialize?", "f", MB_OK);
    }

    FT_Face ftFace;
    if(FT_New_Face(ftLibrary, "arial.ttf", 0, &ftFace))
    {
        MessageBox(0, "didn't find the file", "m", MB_OK);
    }

    FT_Set_Pixel_Sizes(ftFace, 0, 64);

    ...

    GLint location;

    glDepthMask(GL_TRUE);
    //glEnable(GL_DEPTH_TEST);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    GroupOfLetters word1("H", 24, 0.6f, 0.8f, 0.2f, 1.0f);

    GLuint currentLetterTex;

    // create a texture object to hold the glyph bitmap
    glActiveTexture(GL_TEXTURE0);
    glGenTextures(1, &currentLetterTex);
    glBindTexture(GL_TEXTURE_2D, currentLetterTex);


    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

    // glyph bitmap rows are tightly packed (1 byte per pixel), so use 1-byte unpack alignment
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

    while(!glfwWindowShouldClose(window))
    {
        glClearColor(0.2f, 0.4f, 0.6f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glUseProgram(testProgram);

        if(FT_Load_Char(ftFace, 'H', FT_LOAD_RENDER))
        {
            MessageBox(0, "A SERIOUS ERROR!", "m", MB_OK);
        }

        FT_GlyphSlot glyphSlot = ftFace->glyph;

        // upload the rendered glyph bitmap into the bound texture
        glTexImage2D(GL_TEXTURE_2D,
                0,
                GL_ALPHA,
                glyphSlot->bitmap.width,
                glyphSlot->bitmap.rows,
                0,
                GL_ALPHA,
                GL_UNSIGNED_BYTE,
                glyphSlot->bitmap.buffer);

        float letterColor[4];
        letterColor[0] = word1.Getred();
        letterColor[1] = word1.Getgreen();
        letterColor[2] = word1.Getblue();
        letterColor[3] = word1.Getalpha();

        scaleMatrix = glm::scale(glm::mat4(1.0f), glm::vec3(0.5f, 0.5f, 1.0f));
        translationMatrix = glm::translate(glm::mat4(1.0f),
                glm::vec3(0.0f, 0.0f, -1.0f));

        worldMatrix = translationMatrix * scaleMatrix;

        location = glGetUniformLocation(testProgram, "worldMatrix");
        glUniformMatrix4fv(location, 1, GL_FALSE, (GLfloat*)&worldMatrix);

        location = glGetUniformLocation(testProgram, "viewMatrix");
        glUniformMatrix4fv(location, 1, GL_FALSE, (GLfloat*)&viewMatrix);

        location = glGetUniformLocation(testProgram, "projectionMatrix");
        glUniformMatrix4fv(location, 1, GL_FALSE, (GLfloat*)&projectionMatrix);

        location = glGetUniformLocation(testProgram, "inTextColor");
        glUniform4fv(location, 1, letterColor);

        location = glGetUniformLocation(testProgram, "diffuse");
        glUniform1i(location, 0);

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indiceBuffer);
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, (void*)0);

        glfwSwapBuffers(window);
        glfwPollEvents();

    }

    glfwDestroyWindow(window);

    glfwTerminate();

    return 0;
}

I don't get any errors at all, just a friggin green quad staring right back at me DX<.

The shaders are working fine; I tested them with a texture I made that has alpha, and it worked. What the hell could I possibly be doing wrong here?!
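For reference, the fragment shader boils down to roughly this (paraphrased, not the exact file; the uniform names match the code above, and texCoord is just whatever my varying is called):


#version 330

in vec2 texCoord;
out vec4 outColor;

uniform sampler2D diffuse;      // glyph texture on unit 0
uniform vec4 inTextColor;       // colour from GroupOfLetters

void main()
{
    // use the texture's alpha as coverage, tint it with the text colour
    outColor = vec4(inTextColor.rgb, inTextColor.a * texture(diffuse, texCoord).a);
}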



Update: Still haven't fixed the problem, but here's what I found out:

I tried printing out the glyph data that gets loaded, and it apparently is being grabbed correctly. This is the letter 'i':


width: 6
rows:  46
rows  1-6:  0xFF bytes ('ÿÿÿÿÿÿ') - the dot of the 'i'
rows  7-12: zero bytes (blank)
rows 13-46: 0xFF bytes ('ÿÿÿÿÿÿ') - the stem
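(Roughly the loop I used to print that, in case it matters - it just dumps the raw bytes of the bitmap buffer, which is why the filled rows come out as 'ÿ' / 0xFF:)


// needs <iostream>
FT_Load_Char(ftFace, 'i', FT_LOAD_RENDER);
FT_GlyphSlot g = ftFace->glyph;

std::cout << g->bitmap.width << std::endl;
std::cout << g->bitmap.rows << std::endl;

for(unsigned int y = 0; y < g->bitmap.rows; y++)
{
    for(unsigned int x = 0; x < g->bitmap.width; x++)
    {
        // one byte per pixel (FT_PIXEL_MODE_GRAY), rows are bitmap.pitch bytes apart
        std::cout << g->bitmap.buffer[y * g->bitmap.pitch + x];
    }
    std::cout << std::endl;
}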

So the glyph data is apparently fine, which means it must be a problem with the OpenGL code. But I don't see what could possibly be wrong...


The text of the font rendering tutorial is incorrect.

Passing GL_ALPHA as the texture's format and internal format parameter is wrong (GL_ALPHA is deprecated and isn't accepted by core-profile contexts). What you want instead is to pass GL_RED as the texture's format/internal format and access the .r component when sampling the texture in your shaders, like so:


outColor = vec4(1, 1, 1, texture(glyphTexture, TexCoords).r);
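Applied to your code, the upload would look something like this (only the two format arguments change):


glTexImage2D(GL_TEXTURE_2D,
        0,
        GL_RED,                    // internal format
        glyphSlot->bitmap.width,
        glyphSlot->bitmap.rows,
        0,
        GL_RED,                    // format
        GL_UNSIGNED_BYTE,
        glyphSlot->bitmap.buffer);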

Holy shit, it worked. Thank you so much, you are a life saver ^w^. Now to get my vengeance... jk.


Not at all :) someone should probably update the wiki :p

I just did lel.


Great! That'll probably save some headaches in the future.

