#include <GL/glut.h>
#include <math.h>
#include <stdlib.h>
/* Display-window dimensions; kept in sync by the reshape callback. */
GLsizei winWidth = 400, winHeight = 400;
/* X and Z satisfy X^2 + Z^2 = 1, so every vertex below lies on the unit sphere. */
#define X .525731112119133606
#define Z .850650808352039932
/* The 12 vertices of a regular icosahedron inscribed in the unit sphere. */
static GLfloat vdata[12][3] = {
{-X, 0.0, Z}, {X, 0.0, Z}, {-X, 0.0, -Z}, {X, 0.0, -Z},
{0.0, Z, X}, {0.0, Z, -X}, {0.0, -Z, X}, {0.0, -Z, -X},
{Z, X, 0.0}, {-Z, X, 0.0}, {Z, -X, 0.0}, {-Z, -X, 0.0}
};
/* Vertex indices (into vdata) of the icosahedron's 20 triangular faces. */
static GLint tindices[20][3] = {
{0,4,1}, {0,9,4}, {9,5,4}, {4,5,8}, {4,8,1},
{8,10,1}, {8,3,10}, {5,3,8}, {5,2,3}, {2,7,3},
{7,10,3}, {7,6,10}, {7,11,6}, {11,0,6}, {0,1,6},
{6,1,10}, {9,0,11}, {9,11,2}, {9,2,5}, {7,2,11} };
/* GL_LIGHT0 parameters: ambient, diffuse, specular colors, and position. */
float aL[] = { 0.3, 0.3, 0.3, 1.0 };
float dL[] = { 1.0, 1.0, 1.0, 1.0 };
float sL[] = { 1.0, 1.0, 1.0, 1.0 };
float lP[] = { 0.0, 0.0, -4.0, 1.0 };
/* NOTE(review): global loop counters are fragile; prefer function-local ones. */
int i, j;
/* Identifier returned by glutCreateMenu for the main popup menu. */
int menIdMain;
/* Current recursive-subdivision depth (0 = plain icosahedron). */
int depth = 0;
/* One-time GL state setup: black background, depth testing with LEQUAL,
 * flat shading, and a single white light (GL_LIGHT0) configured from the
 * global aL/dL/sL/lP arrays. */
void init (void)
{
glClearColor (0.0, 0.0, 0.0, 0.0);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glShadeModel(GL_FLAT);
/* NOTE(review): GL_POSITION is transformed by the modelview matrix current
 * at this call; here that is the identity -- confirm that is intended. */
glLightfv(GL_LIGHT0, GL_AMBIENT, aL);
glLightfv(GL_LIGHT0, GL_DIFFUSE, dL);
glLightfv(GL_LIGHT0, GL_SPECULAR,sL);
glLightfv(GL_LIGHT0, GL_POSITION,lP);
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
}
/* Popup-menu callback: adjusts the global subdivision depth or quits.
 * value: 1 = increase depth, 2 = decrease depth (floored at 0), 3 = exit.
 * Fix: the original never requested a redraw, so depth changes were
 * invisible until some other event forced a redisplay. */
void mainMenu(int value)
{
switch (value)
{
case 1:
depth += 1;
break;
case 2:
if (depth != 0)
depth -= 1;
break;
case 3:
exit(0);
break;
default:
return; /* unknown entry: state unchanged, no redraw needed */
}
glutPostRedisplay(); /* make the new depth take effect immediately */
}
void normalize(float v[3])
{
GLfloat d = sqrt(v[1]*v[1]+v[2]*v[2]+v[3]*v[3]);
if (d == 0.0)
return;
v[1] /= d; v[2] /= d; v[3] /= d;
}
/* Renders one triangle whose vertices lie on the unit sphere; each vertex
 * position doubles as its (unit-length) surface normal. */
void drawtriangle(float *v1, float *v2, float *v3)
{
float *corner[3];
int k;
corner[0] = v1;
corner[1] = v2;
corner[2] = v3;
glBegin(GL_POLYGON);
for (k = 0; k < 3; k++) {
glNormal3fv(corner[k]);
glVertex3fv(corner[k]);
}
glEnd();
}
/* Recursively subdivides triangle (v1,v2,v3) 'depth' times, projecting each
 * edge midpoint back onto the unit sphere, and draws the resulting triangles.
 * Fix: the original wrote "v12 = v1+v2;" -- that adds two POINTERS and
 * assigns to an array, which is not valid C and does not compute midpoints.
 * Each component must be summed individually inside the loop; normalize()
 * then pushes the point out to the sphere surface, so dividing by 2 is
 * unnecessary. */
void subdivide(float *v1, float *v2, float *v3, int depth)
{
GLfloat v12[3], v23[3], v31[3];
GLint i;
if (depth == 0) {
drawtriangle(v1, v2, v3);
return;
}
/* Midpoint (up to scale) of each edge, component by component. */
for (i = 0; i < 3; i++)
{
v12[i] = v1[i] + v2[i];
v23[i] = v2[i] + v3[i];
v31[i] = v3[i] + v1[i];
}
normalize(v12);
normalize(v23);
normalize(v31);
/* One corner triangle per original vertex, plus the center triangle. */
subdivide(v1, v12, v31, depth-1);
subdivide(v2, v23, v12, depth-1);
subdivide(v3, v31, v23, depth-1);
subdivide(v12, v23, v31, depth-1);
}
/* Display callback: clears color and depth buffers, then draws the
 * icosahedron as 20 recursively subdivided faces at the current depth.
 * Fix: the original indexed "tindices[0]" and friends -- that is a whole
 * ROW (type GLint[3]) used where a scalar array index is required, and it
 * also ignored the loop variable, redrawing face 0 twenty times. Each face
 * needs tindices[face][0..2]. A function-local counter replaces the shared
 * global 'i' for robustness. */
void drawScene(void)
{
int face;
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glColor3f(1.0, 0.0, 0.0);
for (face = 0; face < 20; face++)
{
subdivide(&vdata[tindices[face][0]][0],
          &vdata[tindices[face][1]][0],
          &vdata[tindices[face][2]][0], depth);
}
glutSwapBuffers();
}
/* Reshape callback: resets the projection matrix and viewport to cover the
 * resized window, and records the new dimensions in the globals.
 * NOTE(review): only glLoadIdentity is applied to GL_PROJECTION (leaving the
 * default [-1,1]^3 orthographic volume), no perspective/ortho call follows,
 * and the matrix mode is left as GL_PROJECTION afterwards -- confirm this
 * is intended. */
void winReshapeFcn (int newWidth, int newHeight)
{
glMatrixMode (GL_PROJECTION);
glLoadIdentity ( );
glClear (GL_COLOR_BUFFER_BIT);
// Set the viewport to be the entire window
glViewport(0, 0, newWidth, newHeight);
/* Reset display-window size parameters. */
winWidth = newWidth;
winHeight = newHeight;
}
/* Idle callback: intentionally empty.
 * NOTE(review): registering an empty idle function makes GLUT loop at full
 * CPU; consider glutIdleFunc(NULL) if no animation is needed. */
void idle (void)
{
}
/* Program entry point: creates the window, initializes GL state, builds the
 * popup menu, registers callbacks, and enters the GLUT event loop.
 * Fix: main must return int -- "void main" is non-standard C. */
int main (int argc, char** argv)
{
glutInit (&argc, argv);
glutInitDisplayMode (GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
glutInitWindowPosition (50, 50);
glutInitWindowSize (winWidth, winHeight);
glutCreateWindow ("Recursive Subdivision");
init ();
glutIdleFunc (idle);
/* Create the main menu, attached to the left mouse button. */
menIdMain = glutCreateMenu(mainMenu);
glutAddMenuEntry("Increase Depth", 1);
glutAddMenuEntry("Decrease Depth", 2);
glutAddMenuEntry("Quit", 3);
glutAttachMenu(GLUT_LEFT_BUTTON);
glutDisplayFunc (drawScene);
glutReshapeFunc (winReshapeFcn);
glutMainLoop ();
return 0; /* not reached: glutMainLoop never returns */
}
Icosahedron Recursive Subdivision
I am trying to get the recursive subdivision example working from chapter 2 of the redbook (http://rush3d.com/reference/opengl-redbook-1.1/chapter02.html). However, I am having problems. I think I am implementing it exactly as the book states, but I get runtime errors whenever I try to increase the subdivision depth beyond zero. I have no idea what the problem is. If anyone has any clue, please let me know. My source code is below:
[edit: please use [ source ] tags for large chunks of code. -superpig]
[Edited by - superpig on October 4, 2004 8:48:36 AM]
One thing I noticed:
In C++, every array starts with index 0. So if you're lucky, your normalize function will produce an error when accessing v[3] (since v only knows v[0],v[1],v[2]). If you're unlucky, you won't realize anything except that normalize won't work. The right code should be
EDIT: In the rest of your code, you seem to index arrays in the right way, so I guess it was just a stupid mistake and you know about right array indexing.
void normalize(float v[3]) { GLfloat d = sqrt(v[1]*v[1]+v[2]*v[2]+v[3]*v[3]); if (d == 0.0) return; v[1] /= d; v[2] /= d; v[3] /= d; }
In C++, every array starts with index 0. So if you're lucky, your normalize function will produce an error when accessing v[3] (since v only knows v[0],v[1],v[2]). If you're unlucky, you won't realize anything except that normalize won't work. The right code should be
void normalize(float v[3]) { GLfloat d = sqrt(v[0]*v[0]+v[1]*v[1]+v[2]*v[2]); if (d == 0.0) return; d = 1/d; // 3 mults and 1 div is faster than 3 divs v[0] *= d; v[1] *= d; v[2] *= d; }
EDIT: In the rest of your code, you seem to index arrays in the right way, so I guess it was just a stupid mistake and you know about right array indexing.
This topic is closed to new replies.
Advertisement
Popular Topics
Advertisement