Sign in to follow this  
nomonkeybusiness

OpenGL OGL is not rendering my mesh

Recommended Posts

Hi! I'm working on this 3d-engine which is supposed to support both OpenGL and Direct3D. Up until yesterday, i've been storing all my vertex-data in RAM, so I decided to convert it to upload all the data to the graphics-card. I know my data is correct, since it worked when I stored it the regular way (using a vertex-array, and an index-array). This is the old code: loading:
void VertexBuffer::Create(int p_iSize,void *p_pxData)
{
	m_iSize=p_iSize;
	m_pxBuffer=new Vertex[m_iSize];
	memcpy(m_pxBuffer,p_pxData,m_iSize*sizeof(Vertex));
};
void IndexBuffer::Create(int p_iSize,void *p_pxData)
{
	m_iSize=p_iSize;
	m_piBuffer = new u16[m_iSize];
	memcpy(m_piBuffer,p_pxData,m_iSize*sizeof(u16));
};
rendering:
Model *pxModel=p_pxRenderNode->GetModel();
	Mesh *pxMesh=pxModel->Submesh(0);

	Texture *pxTexture=p_pxRenderNode->GetTexture();
	SetTexture(pxTexture,0);

	glEnableClientState(GL_VERTEX_ARRAY);
	glEnableClientState(GL_TEXTURE_COORD_ARRAY);
	glTexCoordPointer(2,GL_FLOAT,sizeof(Vertex),((float*)pxMesh->GetVB()->GetData())+6); // GetData( ) returns a vertex-array. Note that the vertex from this version looked different
	glVertexPointer(3,GL_FLOAT,sizeof(Vertex),pxMesh->GetVB()->GetData());
	glDrawElements(GL_TRIANGLES,pxMesh->GetIB()->Size(),GL_UNSIGNED_SHORT,pxMesh->GetIB()->GetData());

	glDisableClientState(GL_TEXTURE_COORD_ARRAY);
	glDisableClientState(GL_VERTEX_ARRAY);
This is the new code: loading:
void GLVertexBuffer::create( int p_iSize, Vertex *p_pxData )
{
#define BUFFER_OFFSET( i ) ( (char *)NULL + ( i ) )

	m_iSize = p_iSize;

	glGenBuffers( 1, &m_iVBId );					// Get A Valid Name
	glBindBuffer( GL_ARRAY_BUFFER, m_iVBId );// Bind The Buffer
	// Load The Data
	glBufferData( GL_ARRAY_BUFFER, p_iSize*sizeof(Vertex), NULL, GL_STATIC_DRAW );
	glBufferSubData( GL_ARRAY_BUFFER, 0, sizeof(Vertex) * p_iSize, p_pxData );

	glTexCoordPointer( 2, GL_FLOAT, sizeof(Vertex), BUFFER_OFFSET( 24 ) );
	glNormalPointer( GL_FLOAT, sizeof(Vertex), BUFFER_OFFSET( 12 ) );
	glVertexPointer( 3, GL_FLOAT, sizeof(Vertex), BUFFER_OFFSET( 0 ) );
	
	// Our Copy Of The Data Is No Longer Necessary, It Is Safe In The Graphics Card
	delete [] p_pxData; p_pxData = NULL;
};
bool GLIndexBuffer::create( int p_iSize, u16 *p_pxData ){
	m_iSize = p_iSize;

	glGenBuffers( 1, &m_iIBId );
	glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, m_iIBId );
	glBufferData( GL_ELEMENT_ARRAY_BUFFER, p_iSize * sizeof( u16 ), p_pxData, GL_STATIC_DRAW );

	return true;
};

rendering:
#define BUFFER_OFFSET( i ) ((char *)NULL + (i))
	
	Model *pxModel = p_pxRenderNode->GetModel( );
	GLMesh *pxMesh = static_cast<GLMesh*>( pxModel->Submesh( 0 ) );
	GLIndexBuffer* pxI = static_cast<GLIndexBuffer*>( pxMesh->getIB( ) );
	GLVertexBuffer* pxV = static_cast<GLVertexBuffer*>( pxMesh->getVB( ) );

	Texture *pxTexture = p_pxRenderNode->GetTexture( );
	SetTexture( pxTexture, 0 );

	glBindBuffer(GL_ARRAY_BUFFER, pxV->getId( ) );
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, pxI->getID( ) );

	glEnableClientState(GL_TEXTURE_COORD_ARRAY);
	glEnableClientState(GL_NORMAL_ARRAY);
	glEnableClientState(GL_VERTEX_ARRAY);

	glTexCoordPointer( 2, GL_FLOAT, sizeof(Vertex), BUFFER_OFFSET( 24 ) );	
	glNormalPointer( GL_FLOAT, sizeof(Vertex), BUFFER_OFFSET( 12 ) );
	glVertexPointer( 3, GL_FLOAT, sizeof(Vertex), BUFFER_OFFSET( 0 ) );		// Set The Vertex Pointer To The Vertex Buffer

	glDrawElements( GL_TRIANGLES, pxI->size( ), GL_UNSIGNED_BYTE, BUFFER_OFFSET( 0 ) );

	glDisableClientState(GL_TEXTURE_COORD_ARRAY);
	glDisableClientState(GL_NORMAL_ARRAY);
	glDisableClientState(GL_VERTEX_ARRAY);
Am i doing something obviously wrong? I am new to VBOs, since I've only been using Direct3D before, so i wouldn't be surprised. =P Thanks. // Christo EDIT: Maybe it helps to see the vertex-class old:
class Vertex
{
public:
	float vx,vy,vz;
	float nx,ny,nz;
	float tu0,tv0;
};
new:
class Vertex
{
public:
	bool operator == ( const Vertex& p_xOther ){
		return( 
			pos[0] == p_xOther.pos[0] &&
			pos[1] == p_xOther.pos[1] &&
			pos[2] == p_xOther.pos[2] && 
			normal[0] == p_xOther.normal[0] &&
			normal[1] == p_xOther.normal[1] && 
			normal[2] == p_xOther.normal[2] && 
			uv[0] == p_xOther.uv[0] &&
			uv[1] == p_xOther.uv[1]
			);
	};

	float pos[3];
	float normal[3];
	float uv[2];
};
There...nothing weird, but there you have it.

Share this post


Link to post
Share on other sites
I don't see anything wrong here but there are some unneeded and things:

1. Remove glVertexPointer, glNormalPointer, glTexCoordPointer from VBO::create

2. Why you are using glBufferSubData when you can add the vertices here from glBufferData itself?


to the question:
Have you tried using GL_UNSIGNED_INT to see if it works? And are you sure you are loading vertices and indices correctly to the buffer?

btw. its not necessary to change float x,y,z; to float pos[3]; (same for normal and texcoord)

[Edited by - Kasya on February 24, 2010 2:00:12 PM]

Share this post


Link to post
Share on other sites
I've tried both GL_UNSIGNED_INT, GL_UNSIGNED_SHORT and GL_UNSIGNED_BYTE.
And yes, i am pretty sure. It worked on the old code, and i haven't changed a single line of code in the loading of the mesh.
I've tried setting a break-point in the create-functions, and p_pxData seems to be correct (although I can't swear, since we're talking ~27000 indices and ~8000 vertices), and anyway I can't think of a reason why it would be changed, since it is the same as before.

Share this post


Link to post
Share on other sites

Create an account or sign in to comment

You need to be a member in order to leave a comment

Create an account

Sign up for a new account in our community. It's easy!

Register a new account

Sign in

Already have an account? Sign in here.

Sign In Now

Sign in to follow this