So the crash is fixed now, but I can't figure out why nothing is drawing. I've been messing around for over a night now and can't find the problem. Can anyone point out if I'm setting things up incorrectly or perhaps its just another silly error like the indexing that I can't seem to spot?
I create my vertex and index buffers like this:
GLuint vbo;glGenBuffers(1, &vbo);glBindBuffer(GL_ARRAY_BUFFER_ARB, vbo);glBufferData(GL_ARRAY_BUFFER_ARB, 8 * sizeof(Vertex), 0, GL_DYNAMIC_DRAW);glBindBuffer(GL_ARRAY_BUFFER_ARB, 0);GLuint ibo;glGenBuffers(1, &ibo);glBindBuffer(GL_ELEMENT_ARRAY_BUFFER_ARB, ibo);glBufferData(GL_ELEMENT_ARRAY_BUFFER_ARB, 36 * sizeof(unsigned short), 0, GL_DYNAMIC_DRAW);glBindBuffer(GL_ELEMENT_ARRAY_BUFFER_ARB, 0);glBindBuffer(GL_ARRAY_BUFFER_ARB, vbo);glBufferSubData(vbo, 0, sizeof(Vertex) * 8, unit_cube_verts);glBindBuffer(GL_ARRAY_BUFFER_ARB, 0);glBindBuffer(GL_ELEMENT_ARRAY_BUFFER_ARB, ibo);glBufferSubData(ibo, 0, sizeof(unsigned short) * 36, unit_cube_indices);glBindBuffer(GL_ELEMENT_ARRAY_BUFFER_ARB, 0);
The function I use to create my frag/vertex shaders looks like this (with VertexShader being a typedef for GLuint):
VertexShader CreateVertexShader(const char* source){ std::ifstream file; size_t size; char* buffer; file.open(source, std::ios_base::in|std::ios_base::binary); if(!file.good()) { check(0); file.close(); return 0; } file.seekg(0, std::ios::end); size = file.tellg(); file.clear(); file.seekg(0, std::ios::beg); buffer = new char[ size + 1 ]; file.read(buffer, size); buffer[ size ] = 0; file.close(); VertexShader shader = glCreateShader(GL_VERTEX_SHADER); glShaderSource(shader, 1, (const GLchar**)&buffer, 0); glCompileShader(shader); GLint compiled; glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); if (!compiled) { int32 logLength; int32 charsWritten; glGetShaderiv(shader, GL_INFO_LOG_LENGTH, (GLint*)&logLength); if (logLength > 0) { char* infoLog = new char[logLength]; glGetInfoLogARB(shader, logLength, (GLsizei*)&charsWritten, infoLog); std::cerr << infoLog << std::endl; delete[] infoLog; } check(0); } delete[] buffer; return shader;}
I create the shader program like this (again, ShaderProgram is a typedef for GLuint):
ShaderProgram CreateShaderProgram(VertexShader vShader, FragmentShader pShader){ ShaderProgram prog = glCreateProgram(); glAttachShader(prog, vShader); glAttachShader(prog, pShader); glLinkProgram(prog); GLint linked; glGetProgramiv(prog, GL_LINK_STATUS, &linked); if (!linked) { check(0); } return prog;}
And finally the code to render my cube data which is the same as in my original post, with the indices decremented by 1:
glUseProgram(shader);glBindBuffer(GL_ARRAY_BUFFER_ARB, vbo);glBindBuffer(GL_ELEMENT_ARRAY_BUFFER_ARB, ibo);GLuint attr = glGetAttribLocation(shader, name);glEnableVertexAttribArray(attr);glVertexAttribPointer(attr, 4, GL_FLOAT, GL_FALSE, sizeof(Vertex), unit_cube_verts);glMatrixMode(GL_PROJECTION);glLoadIdentity();gluPerspective(60.f, 640.f/480.f, 0.2f, 1000.f);glMatrixMode(GL_MODELVIEW);glLoadIdentity();gluLookAt( 0.f, 0.f, -2.f, 0.f, 0.f, 0.f, 0.f, 1.f, 0.f);glColor4f(0.f, 1.f, 0.f, 1.f);glDrawElements(GL_TRIANGLES, 12, GL_UNSIGNED_SHORT, 0);
Oh and Vertex is a struct with four floats (x, y, z, & w). I'm pretty stuck right now, any help is much appreciated. Thanks.