BITMAP FONTS?

Started by
1 comment, last by tnutty 15 years, 1 month ago
I am trying to make a bitmap font following the NeHe tutorial, but the font is not displaying. Can you check out the code?

#include <windows.h>   // must precede GL headers on Windows (WINGDIAPI, wgl*)
#include <GL/glut.h>
#include <cmath>
#include <cstdarg>
#include <cstdio>
#include <cstring>     // strlen used by glPrint
#include "AllFiles.h"
 

GLuint base;      // id of the first of 96 font display lists (ASCII 32..127)
GLfloat cnt1;     // animation counter: drives text colour and x-position
GLfloat cnt2;     // second animation counter: drives colour and y-position
HDC hDc = NULL;   // NOTE(review): never assigned anywhere in this file — buildFonts() passes a NULL DC, which is why nothing displays

// Build 96 display lists (one per ASCII char 32..127) from a GDI font.
// Must be called AFTER the GL context exists (i.e. after glutCreateWindow).
GLvoid buildFonts()
{
	HFONT hFont;
	HFONT hOldFont;

	// BUG FIX: the global hDc was never assigned, so SelectObject and
	// wglUseFontBitmaps received a NULL device context and silently
	// failed — that is why the text never displayed.  GLUT created the
	// GL context for us, so grab its device context here.
	hDc = wglGetCurrentDC();

	base = glGenLists(96); // storage for 96 characters.

	hFont = CreateFont(-24,0,0,0,FW_BOLD,FALSE,FALSE,FALSE,ANSI_CHARSET,OUT_TT_PRECIS,CLIP_DEFAULT_PRECIS,ANTIALIASED_QUALITY
			,FF_DONTCARE|DEFAULT_PITCH,L"Courier New");

	hOldFont = (HFONT) SelectObject(hDc,hFont);   // select font into the DC
	wglUseFontBitmaps(hDc,32,96,base);            // build lists for chars 32..127
	SelectObject(hDc,hOldFont);                   // restore the previous font
	DeleteObject(hFont);                          // GDI object no longer needed
}

// Release the 96 font display lists allocated by buildFonts().
GLvoid killFont()
{
	glDeleteLists(base,96);
}

// printf-style text output at the current raster position.
// fmt: printf format string; NULL is ignored.  Output is truncated to
// 255 characters.
GLvoid glPrint(const char*fmt,...)
{
	char text[256];

	va_list pArg;

	if(fmt == NULL) return;   // nothing to print

	va_start(pArg,fmt);
		// BUG FIX: vsprintf is unbounded and can overflow text[] for a
		// long formatted string; vsnprintf truncates safely instead.
		vsnprintf(text,sizeof text,fmt,pArg);
	va_end(pArg);

	glPushAttrib(GL_LIST_BIT);
		glListBase(base-32);  // list 'base' corresponds to ASCII 32 (space)
		glCallLists((GLsizei)strlen(text),GL_UNSIGNED_BYTE,text);
	glPopAttrib();            // restore the previous list base

}




// One-time GL state setup: smooth shading, depth testing, and the
// bitmap font display lists.
void initGL()
{
	glShadeModel(GL_SMOOTH);                            // interpolate colours across primitives
	glClearDepth(1.0f);                                 // depth buffer clears to the far plane
	glEnable(GL_DEPTH_TEST);
	glDepthFunc(GL_LEQUAL);                             // pass fragments at or nearer than stored depth
	glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);  // best-quality perspective interpolation

	buildFonts();   // safe here: the GL context already exists
}

// GLUT reshape callback: rebuild the projection for the new window size.
void reshape(int width,int height)
{
	// BUG FIX: a minimized window reports height == 0, which would make
	// the aspect ratio a division by zero below.
	if(height == 0) height = 1;

	glViewport(0,0,width,height);
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();

	// 45-degree vertical FOV, near plane 1, far plane 250.
	gluPerspective(45,(float)width/(float)height,1,250.0);

	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
}


void display()
{
    glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	glLoadIdentity();
	
	//gluLookAt(0,0,1,0,0,0,0,1,0);
	glTranslatef(0.0f,0.0f,-1.0f);

	glColor3f(1.0f*cos(cnt1),1.0f*cos(cnt2),1.0f-0.5f*cos(cnt1+cnt2));
	glRasterPos2f(-0.45f+0.05f*cos(cnt1),0.32f*sin(cnt2));

	glPrint("TNUTTY 1st Font - %7.2f",cnt1);

	cnt1+=0.05f;
	cnt2+=0.005f;

	glutSwapBuffers();
	glutPostRedisplay();
 
}

void keyboard(unsigned char key, int x, int y)
{
	switch(key)
	{
	case 27: exit(0);break;
	case' ':  
	case'z':; // killFont(); exit(0); break;
	}

}

// GLUT special-key callback.  The arrow keys are recognised but not yet
// bound to any action — this is a stub for future controls.
void specialFunc( int key, int x, int y)
{
	switch(key)
	{
	case GLUT_KEY_UP:        /* fallthrough */
	case GLUT_KEY_DOWN:      /* fallthrough */
	case GLUT_KEY_LEFT:      /* fallthrough */
	case GLUT_KEY_RIGHT:
	default:
		break;               // no action yet
	}
}

// Entry point: create a double-buffered, depth-buffered RGBA window,
// initialise GL state, register the callbacks, and enter the GLUT loop.
int main(int argc, char ** argv)
{
	glutInit(&argc, argv);
	glutInitDisplayMode(GLUT_DOUBLE|GLUT_RGBA|GLUT_DEPTH);
	glutInitWindowSize(500,500);
	glutInitWindowPosition(300,200);
	glutCreateWindow("OPENGL");

	initGL();   // must run after glutCreateWindow: needs a live GL context

	// Wire up all event handlers before handing control to GLUT.
	glutDisplayFunc(display);
	glutReshapeFunc(reshape);
	glutKeyboardFunc(keyboard);
	glutSpecialFunc(specialFunc);

	glutMainLoop();   // never returns

	return 0;
}
Our whole life is an OpenGL application.
Advertisement
Hey, my teammates and I also had this issue with the NeHe tutorial, so I decided to go an alternate way and just made every letter a textured quad. It may not be the most efficient approach, but it's doing the job so far, and I can alter the size and color very easily. Maybe give this a try?
Alright, but I am very curious why his code works with this method while mine doesn't.

[Edited by - tnutty on March 13, 2009 4:31:59 PM]
Our whole life is an OpenGL application.

This topic is closed to new replies.

Advertisement