

Zylann

Member Since 24 May 2012

Topics I've Started

[Freetype] Cannot render font to a bitmap

09 May 2015 - 08:33 AM

Hello,

I'm integrating FreeType into my project, but so far I haven't been able to render any font into a bitmap.

The FreeType functions never return an error, but the bitmap I get is always empty: its width and height are zero, whatever character I pass.

The font I'm using is a basic outline TTF.

 

In my project, I split the code into two files: FontLoader.cpp and Font.cpp.

FontLoader is a class that holds the FT_Library, and Font holds the FT_Face.

 

Here is the part of my code that uses FreeType (unrelated code elided for clarity):

 

 

FontLoader.hpp

//...
#include <core/asset/AssetLoader.hpp>

#include <ft2build.h>
#include FT_FREETYPE_H

namespace freetype
{

class FontLoader : public sn::AssetLoader
{
public:

    //...

    bool load(std::ifstream & ifs, sn::Asset & asset) const override;

    //...

private:
    FT_Library m_library;

};

} // freetype
//...

FontLoader.cpp

//...
#include "Font.hpp"
#include "FontLoader.hpp"

using namespace sn;

namespace freetype
{

FontLoader::FontLoader():
    m_library(nullptr)
{
    // Initialize Freetype
    if (FT_Init_FreeType(&m_library) != 0)
    {
        SN_ERROR("Failed to initialize FreeType library");
    }
}

FontLoader::~FontLoader()
{
    if (m_library != 0)
    {
        // Deinitialize Freetype
        FT_Done_FreeType(m_library);
    }
}

//...

bool FontLoader::load(std::ifstream & ifs, sn::Asset & asset) const
{
    freetype::Font * font = sn::checked_cast<freetype::Font*>(&asset);
    SN_ASSERT(font != nullptr, "Invalid asset type");

    // Read the whole stream
    ifs.seekg(0, ifs.end);
    u32 len = ifs.tellg();
    ifs.seekg(0, ifs.beg);
    char * data = new char[len];
    ifs.read(data, len);

    // Load the face
    FT_Face face;
    if (FT_New_Memory_Face(m_library, reinterpret_cast<const FT_Byte*>(data), len, 0, &face) != 0)
    {
        SN_ERROR("Failed to create Freetype font face from memory");
        delete[] data;
        return false;
    }
    delete[] data;

    // Select the unicode character map
    if (FT_Select_Charmap(face, FT_ENCODING_UNICODE) != 0)
    {
        SN_ERROR("Failed to select the Unicode character set (Freetype)");
        return false;
    }

    // Store the loaded font
    font->setFace(face);

    return true;
}

} // namespace freetype

Font.hpp

//...
#include <ft2build.h>
#include FT_FREETYPE_H

namespace freetype
{

class Font : public sn::Font, public sn::NonCopyable
{
    //...
private:
    bool generateGlyph(sn::Glyph & out_glyph, sn::u32 unicode, sn::FontFormat format) const;
    //...
    bool setCurrentSize(sn::u32 characterSize) const;

private:
    FT_Face                                 m_face;
    //...

};

} // namespace freetype
//...

Font.cpp

#include "Font.hpp"

#include FT_GLYPH_H
#include FT_OUTLINE_H
#include FT_BITMAP_H
//...
bool Font::generateGlyph(Glyph & out_glyph, sn::u32 unicode, sn::FontFormat format) const
{
    Glyph glyph;

    if (!setCurrentSize(format.size))
        return false;

    // Load the glyph corresponding the unicode
    if (FT_Load_Char(m_face, unicode, FT_LOAD_TARGET_NORMAL) != 0)
        return false;

    // Retrieve the glyph
    FT_Glyph glyphDesc;
    if (FT_Get_Glyph(m_face->glyph, &glyphDesc) != 0)
        return false;

    // Apply bold
    FT_Pos weight = 1 << 6;
    bool outline = (glyphDesc->format == FT_GLYPH_FORMAT_OUTLINE);
    if (format.isBold() && outline)
    {
        FT_OutlineGlyph outlineGlyph = (FT_OutlineGlyph)glyphDesc;
        FT_Outline_Embolden(&outlineGlyph->outline, weight);
    }

    // Convert the glyph to a bitmap (i.e. rasterize it)
    if (glyphDesc->format != FT_GLYPH_FORMAT_BITMAP)
    {
        if (FT_Glyph_To_Bitmap(&glyphDesc, FT_RENDER_MODE_NORMAL, 0, 1) != 0)
        {
            SN_ERROR("Failed to convert glyph to bitmap");
        }
    }
    FT_BitmapGlyph bitmapGlyph = (FT_BitmapGlyph)glyphDesc;
    FT_Bitmap& bitmap = bitmapGlyph->bitmap;

    // Compute the glyph's advance offset
    glyph.advance = glyphDesc->advance.x >> 16;
    if (format.isBold())
        glyph.advance += weight >> 6;

    u32 width  = bitmap.width;
    u32 height = bitmap.rows;
    if (width > 0 && height > 0)
    {
        // NEVER ENTERS HERE

        // Funny conversion stuff
        //...
    }
    else
    {
        SN_DLOG("Character " << unicode << " (ascii: " << (char)unicode << ") has an empty bitmap");
    }

    // Delete the FT glyph
    FT_Done_Glyph(glyphDesc);

    out_glyph = glyph;

    return true;
}

//...
bool Font::setCurrentSize(sn::u32 characterSize) const
{
    SN_ASSERT(m_face != nullptr, "Invalid state: Freetype font face is null");

    FT_UShort currentSize = m_face->size->metrics.x_ppem;

    if (currentSize != characterSize)
    {
        return FT_Set_Pixel_Sizes(m_face, 0, characterSize) == 0;
    }
    else
    {
        return true;
    }
}
//...

EDIT: by stepping into FT_Glyph_To_Bitmap and further, I discovered this:

 

ftobjs.c

          error = renderer->render( renderer, slot, render_mode, NULL );
          if ( !error                                   ||
               FT_ERR_NEQ( error, Cannot_Render_Glyph ) ) // This line is reached
            break;

ftrend1.c

renderer->render being a function pointer, I stepped into it too:
    /* check rendering mode */
#ifndef FT_CONFIG_OPTION_PIC
    if ( mode != FT_RENDER_MODE_MONO )
    {
      /* raster1 is only capable of producing monochrome bitmaps */
      if ( render->clazz == &ft_raster1_renderer_class )
        return FT_THROW( Cannot_Render_Glyph ); // THIS LINE IS REACHED
    }
    else
    {
      /* raster5 is only capable of producing 5-gray-levels bitmaps */
      if ( render->clazz == &ft_raster5_renderer_class )
        return FT_THROW( Cannot_Render_Glyph );
    }
#else /* FT_CONFIG_OPTION_PIC */

I want to render with FT_RENDER_MODE_NORMAL, but for some reason it seems FreeType can't. :(
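A minimal standalone test along these lines (the font path is a placeholder) should show whether this FreeType build can produce anti-aliased bitmaps at all, independently of my integration code:

// freetype_smoke_test.cpp - checks the smooth renderer outside of the engine
#include <ft2build.h>
#include FT_FREETYPE_H
#include <cstdio>

int main()
{
    FT_Library library = nullptr;
    if (FT_Init_FreeType(&library) != 0)
    {
        std::printf("FT_Init_FreeType failed\n");
        return 1;
    }

    FT_Face face = nullptr;
    if (FT_New_Face(library, "font.ttf", 0, &face) != 0) // placeholder path
    {
        std::printf("FT_New_Face failed\n");
        return 1;
    }

    FT_Set_Pixel_Sizes(face, 0, 32);

    // FT_LOAD_RENDER rasterizes immediately with the anti-aliased (smooth) renderer,
    // the same render mode FT_Glyph_To_Bitmap(..., FT_RENDER_MODE_NORMAL, ...) needs
    if (FT_Load_Char(face, 'A', FT_LOAD_RENDER) != 0)
    {
        std::printf("FT_Load_Char failed\n");
        return 1;
    }

    FT_Bitmap & bmp = face->glyph->bitmap;
    std::printf("bitmap: %u x %u, pixel_mode=%d\n",
                (unsigned)bmp.width, (unsigned)bmp.rows, (int)bmp.pixel_mode);

    FT_Done_Face(face);
    FT_Done_FreeType(library);
    return 0;
}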


Swapping header/source in Visual Studio opens wrong file

27 April 2015 - 01:17 PM

Hello,

I was not sure where to ask this question:

 

In my project, I have Control.hpp and Control.cpp files.

 

I very often use a shortcut to swap between header and source files; however, with Control.cpp, Visual Studio 2013 takes me to a different Control.h, one completely unrelated to my project (C:\Program Files (x86)\Windows Kits\8.1\Include\um\Control.h).

 

Actually, whenever a .cpp file in my C++ project matches a .h header located in this Windows Kits directory, Visual Studio prioritizes the wrong file.

 

This is very annoying; does anyone have an idea how to solve this?

 

 


wglSetCurrent() and render to texture fails

15 April 2015 - 05:32 PM

Hello,

 

I'm trying to render with OpenGL 3.3 into multiple windows.

The technique I'm using at the moment is to have only one context and switch the target window.

 

On Win32, what I have done so far is roughly this (a simplified sketch follows the lists):

 

Initialization:

- Create main context with a dummy, invisible window (HWND)

- Create one or more visible windows

- Set an OpenGL-compatible pixel format on those windows

 

Rendering:

- Call wglMakeCurrent() before rendering on any of these windows

- Render (OpenGL calls)

- After rendering, call SwapBuffers() on every window
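
A heavily simplified sketch of that setup (error handling and the actual 3.3 core context creation via wglCreateContextAttribsARB are omitted; all names are mine):

#include <windows.h>
#include <vector>

// One pixel format description shared by the dummy window and all visible windows;
// the windows must use a pixel format compatible with the one the context was created on.
static PIXELFORMATDESCRIPTOR makePixelFormat()
{
    PIXELFORMATDESCRIPTOR pfd = {};
    pfd.nSize      = sizeof(pfd);
    pfd.nVersion   = 1;
    pfd.dwFlags    = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
    pfd.iPixelType = PFD_TYPE_RGBA;
    pfd.cColorBits = 32;
    pfd.cDepthBits = 24;
    return pfd;
}

// Initialization: create the main context on the hidden dummy window,
// then give every visible window the same OpenGL-compatible pixel format
HGLRC createMainContext(HWND dummyWindow, const std::vector<HWND> & visibleWindows)
{
    PIXELFORMATDESCRIPTOR pfd = makePixelFormat();

    HDC dummyDC = GetDC(dummyWindow);
    SetPixelFormat(dummyDC, ChoosePixelFormat(dummyDC, &pfd), &pfd);
    HGLRC context = wglCreateContext(dummyDC);

    for (HWND hwnd : visibleWindows)
    {
        HDC dc = GetDC(hwnd);
        SetPixelFormat(dc, ChoosePixelFormat(dc, &pfd), &pfd);
    }
    return context;
}

// Rendering: retarget the single context to the window, draw, then swap
void renderWindow(HWND hwnd, HGLRC context)
{
    HDC dc = GetDC(hwnd); // DCs are cached/released elsewhere in the real code
    wglMakeCurrent(dc, context);
    // ... OpenGL calls for this window ...
    SwapBuffers(dc);
}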

 

 

My first test only targets one window, but even then I don't see anything; the screen stays black. :(

If I resize the window, I see the clear color appear, but nothing more.

 

However, if I bypass the post-processing effects (which use render to texture) at runtime with a shortcut, I do see my animated scene.

Once I re-enable the effects, I see two static frames flickering rapidly: one with the effects, the other without. Neither of them animates.

 

 

My application worked perfectly fine before I introduced this multi-window technique, and I get no errors so far...

I previously tried with shared contexts, but got the same weird result.

 

So... I'm wondering, did I miss something about render to texture?
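
To be clear, by "effects" I mean the usual render-to-texture pattern; a frame does roughly this (simplified, all names are placeholders, the FBO and its color texture are created once at startup):

#include <GL/glew.h>

// Per-frame flow of one post-processing pass (sketch).
// sceneFBO has sceneTexture attached via
// glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, sceneTexture, 0).
void renderFrame(GLuint sceneFBO, GLuint sceneTexture, GLuint effectProgram, GLuint quadVAO)
{
    // 1) Render the scene into the offscreen texture
    glBindFramebuffer(GL_FRAMEBUFFER, sceneFBO);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // ... draw the scene ...

    // 2) Apply the effect into the window's default framebuffer
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(effectProgram);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, sceneTexture);
    glBindVertexArray(quadVAO);
    glDrawArrays(GL_TRIANGLES, 0, 6); // fullscreen quad
}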

 

 

Note: I'm using raw Win32 and OpenGL with GLEW, no other libraries involved.

 

EDIT: the topic title should say "wglMakeCurrent", but I don't know how to change it.


How to handle multiple lights?

11 April 2015 - 05:03 PM

Hello,

 

I'm currently adding lighting to my game engine, and it already works fine with one light (directional, point or spot...).

 

However, how can I handle multiple lights efficiently?

 

I know I can write a shader with a uniform array where each element is a light struct, or with multiple arrays of primitives.

Then in a for loop, each light would contribute to the final pixel color.

uniform Light u_Lights[32];
//...
for (int i = 0; i < 32; ++i)
{
    outputColor += /* contribution of u_Lights[i] */;
}

However, if I want to support N lights, would I have to write my shaders so they take N lights in those uniform arrays?

If I create only 2 lights, will I have to loop through all 32 just to avoid branching? Or is branching OK as long as N remains constant?

Then, what if N can change at runtime?

Do I have to recompile every shader that uses lights just to reset the number of lights as a compile-time constant?
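
For instance, something like a fixed maximum plus a count uniform, so the array size stays a compile-time constant but the loop bound does not (rough sketch; the Light struct layout is just an example):

#version 330

struct Light
{
    vec3  position;
    vec3  color;
    float intensity;
};

const int MAX_LIGHTS = 32;

uniform Light u_Lights[MAX_LIGHTS];
uniform int   u_LightCount; // actual number of lights this frame (<= MAX_LIGHTS)

in vec3 v_Normal;
in vec3 v_WorldPos;

out vec4 outputColor;

void main()
{
    vec3 total = vec3(0.0);
    for (int i = 0; i < u_LightCount; ++i)
    {
        // Very rough diffuse term, just to show the accumulation
        vec3 toLight = normalize(u_Lights[i].position - v_WorldPos);
        float diffuse = max(dot(normalize(v_Normal), toLight), 0.0);
        total += u_Lights[i].color * (diffuse * u_Lights[i].intensity);
    }
    outputColor = vec4(total, 1.0);
}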

 

That's a lot of questions, but they all come down to the same problem: compile-time vs. runtime performance.

What would be the best general-purpose approach?


No matching overloaded function found: mix

29 March 2015 - 01:20 PM

I'm currently porting Oculus Rift's shaders to GLSL 330, but I'm getting strange errors:
 
ERROR: 0:36: error(#202) No matching overloaded function found: mix
ERROR: 0:36: error(#160) Cannot convert from: "const float" to: "highp 4X4 matrix of mat4"
ERROR: error(#273) 2 compilation errors.  No code generated

 

At this line:

mat4 lerpedEyeRot = mix(u_EyeRotationStart, u_EyeRotationEnd, in_TimewarpLerpFactor);

I also tried to convert the last mix parameter to a mat4, with no luck.
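
If mix() simply isn't defined for matrices in GLSL 330, I suppose the fallback is a manual component-wise lerp (matrix + matrix and matrix * scalar are component-wise), something like:

mat4 lerpedEyeRot = u_EyeRotationStart * (1.0 - in_TimewarpLerpFactor)
                  + u_EyeRotationEnd * in_TimewarpLerpFactor;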

I'm using OpenGL 3.3 on Windows 7; my graphics card is an AMD Radeon HD 6670.

Here is the vertex shader at the time of writing:
 


#version 330

layout (location = 0) in vec3 in_Position;
//layout (location = 1) in vec4 in_Color;
layout (location = 2) in vec2 in_TexCoord0; // R
layout (location = 3) in vec2 in_TexCoord1; // G
layout (location = 4) in vec2 in_TexCoord2; // B
layout (location = 5) in float in_Vignette;
layout (location = 6) in float in_TimewarpLerpFactor;

uniform vec2 u_EyeToSourceUVScale;
uniform vec2 u_EyeToSourceUVOffset;
uniform mat4 u_EyeRotationStart;
uniform mat4 u_EyeRotationEnd;

out float v_Vignette;
smooth out vec2 v_TexCoord0;
smooth out vec2 v_TexCoord1;
smooth out vec2 v_TexCoord2;

vec2 TimewarpTexCoord(vec2 TexCoord, mat4 rotMat)
{
	// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
	// aberration and distortion). These are now "real world" vectors in direction (x,y,1)
	// relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
	vec3 transformed = (rotMat * vec4(TexCoord.xy, 1.0, 1.0)).xyz; // GLSL: matrix * vector instead of HLSL's mul()
	// Project them back onto the Z=1 plane of the rendered images.
	vec2 flattened = (transformed.xy / transformed.z);
	// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
	return u_EyeToSourceUVScale * flattened + u_EyeToSourceUVOffset;
}

void main()
{
	mat4 lerpedEyeRot = mix(u_EyeRotationStart, u_EyeRotationEnd, in_TimewarpLerpFactor); // ERROR HERE
	v_TexCoord0 = TimewarpTexCoord(in_TexCoord0, lerpedEyeRot);
	v_TexCoord1 = TimewarpTexCoord(in_TexCoord1, lerpedEyeRot);
	v_TexCoord2 = TimewarpTexCoord(in_TexCoord2, lerpedEyeRot);
	gl_Position = vec4(in_Position.xy, 0.5, 1.0);
	v_Vignette = in_Vignette; // For vignette fade
}
