GL Bitmap Fonts problem

Hello, I am trying to display a WGL bitmap font. I am writing a second project that uses the same font rendering code as another of my projects, where it works fine. My problem is that I am stumped as to why my text is not displaying. I know that OpenGL is a state machine, and that settings stay enabled or disabled until changed, so is there some OpenGL state that could be conflicting with the text being shown? Thanks very much! Here is the relevant code:
#include <windows.h>	// must come before the GL headers on Win32
#include <GL/gl.h>
#include <GL/glext.h>	// PFNGLACTIVETEXTUREARBPROC, used in InitGL below
#include <cstdio>	// vsnprintf
#include <cstring>	// strlen
#include <cstdarg>	// va_list

void BitmapFont::Print(const char *str, ...)
{
	char text[256];
	va_list args;

	if (str == NULL)
		return;

	// Format the variable arguments into a bounded buffer.
	va_start(args, str);
	vsnprintf(text, sizeof(text), str, args);
	va_end(args);

	// Bitmap text is drawn untextured and unlit.
	glDisable(GL_TEXTURE_2D);
	glDisable(GL_LIGHTING);
	glPushMatrix();
		glLoadIdentity();
		glTranslatef(0.0f, 0.0f, -1.0f);
		glColor4f(r, g, b, a);
		glRasterPos2f(x, y);

		// Offset the list base so character codes (from ASCII 32 up)
		// index the display lists built in Build() below.
		glPushAttrib(GL_LIST_BIT);
			glListBase(listBase - 32);
			glCallLists(strlen(text), GL_UNSIGNED_BYTE, text);
		glPopAttrib();
	glPopMatrix();
	glEnable(GL_LIGHTING);
	glEnable(GL_TEXTURE_2D);
}
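
For what it's worth, here is a sketch of the same routine written so it restores whatever enable state the caller had, instead of hard-coding glEnable calls at the end. PrintStateSafe is just a placeholder name, and it assumes the same class members (r, g, b, a, x, y, listBase) as above:

// Sketch: save the caller's state up front and let glPopAttrib
// restore it, so we don't assume lighting/texturing were on.
void BitmapFont::PrintStateSafe(const char *text)
{
	// GL_ENABLE_BIT covers the enable flags; GL_CURRENT_BIT covers
	// the current color and raster position; GL_LIST_BIT covers the
	// display list base.
	glPushAttrib(GL_ENABLE_BIT | GL_CURRENT_BIT | GL_LIST_BIT);

	glDisable(GL_TEXTURE_2D);
	glDisable(GL_LIGHTING);

	glPushMatrix();
		glLoadIdentity();
		glTranslatef(0.0f, 0.0f, -1.0f);
		glColor4f(r, g, b, a);
		glRasterPos2f(x, y);

		glListBase(listBase - 32);
		glCallLists((GLsizei)strlen(text), GL_UNSIGNED_BYTE, text);
	glPopMatrix();

	// Puts GL_LIGHTING, GL_TEXTURE_2D, color, raster position and
	// list base back to whatever the caller had.
	glPopAttrib();
}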

...

void BitmapFont::Build(char *Name, int fsize)
{
	HFONT hFont;		// Windows font handle
	HFONT oldFont;

	g_HDC = wglGetCurrentDC();
	listBase = glGenLists(96);	// one display list per ASCII char 32..127

	// A negative height asks GDI to match the character height to fsize.
	if (_stricmp(Name, "symbol") == 0)
	{
		hFont = CreateFont(-fsize, 0, 0, 0, FW_BOLD, FALSE, FALSE, FALSE, SYMBOL_CHARSET,
						OUT_TT_PRECIS, CLIP_DEFAULT_PRECIS, ANTIALIASED_QUALITY,
						FF_DONTCARE | DEFAULT_PITCH, Name);
	}
	else
	{
		hFont = CreateFont(-fsize, 0, 0, 0, FW_BOLD, FALSE, FALSE, FALSE, ANSI_CHARSET,
						OUT_TT_PRECIS, CLIP_DEFAULT_PRECIS, ANTIALIASED_QUALITY,
						FF_DONTCARE | DEFAULT_PITCH, Name);
	}

	oldFont = (HFONT)SelectObject(g_HDC, hFont);

	// Build display lists for ASCII characters 32..127 from the font.
	wglUseFontBitmaps(g_HDC, 32, 96, listBase);

	SelectObject(g_HDC, oldFont);	// Restore the previous font
	DeleteObject(hFont);		// Delete our font handle
}
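
In case it matters, this is roughly how the class gets used; the names myFont and fps are just placeholders, and Build() has to be called while an OpenGL context is current since it relies on wglGetCurrentDC():

	BitmapFont myFont;			// placeholder instance name
	myFont.Build("Arial", 16);		// builds display lists for ASCII 32..127
	// ... then each frame, after the scene is drawn:
	myFont.Print("FPS: %d", fps);		// fps is a placeholder variable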

...

int InitGL ()
{
	glShadeModel(GL_SMOOTH);				// Enable smooth shading
	glClearColor(0.0f, 0.0f, 0.0f, 0.5f);			// Black background
	glClearDepth(1.0f);					// Depth buffer setup

	glEnable(GL_DEPTH_TEST);				// Enable depth testing
	glEnable(GL_CULL_FACE);
	glDepthFunc(GL_LEQUAL);					// The type of depth testing to do
	glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);	// Really nice perspective calculations

	// Turn on texture mapping
	glEnable(GL_TEXTURE_2D);

	// Enable front face culling, since that's what Quake3 does
	glCullFace(GL_FRONT);

	glActiveTextureARB       = (PFNGLACTIVETEXTUREARBPROC)       wglGetProcAddress("glActiveTextureARB");
	glClientActiveTextureARB = (PFNGLCLIENTACTIVETEXTUREARBPROC) wglGetProcAddress("glClientActiveTextureARB");

	// Here we make sure that the functions were loaded properly
	if (!glActiveTextureARB || !glClientActiveTextureARB)
	{
		// Display an error message and quit
		MessageBox(hWnd, "Your video card doesn't support multitexturing", "Error", MB_OK);
		PostQuitMessage(0);
	}

	return 1;
}
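
One state-machine gotcha I have read about: if glRasterPos2f() places the raster position outside the current view volume (easy to do with a perspective projection and screen-space x, y values), the raster position is marked invalid and every following glBitmap/glCallLists call is silently dropped. A minimal diagnostic sketch I intend to try right after setting the position:

	// Diagnostic sketch: was the raster position clipped?
	GLboolean rasterValid = GL_FALSE;
	glGetBooleanv(GL_CURRENT_RASTER_POSITION_VALID, &rasterValid);
	if (!rasterValid)
	{
		// The position was clipped, so no text will appear.
		OutputDebugString("Raster position invalid - text will be dropped\n");
	}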

