I'm looking for help with a "Debug Assertion Failed" error that I keep getting at runtime.
Here is my code:
main.cpp
#include "main.h"
#include "3dsloader.h"
void lighting();
void initRendering();
void drawScene();
void mainLoop();
Object* testcube;
int main(int argc, char **argv)
{
bool running = true;
InitializeWindow();
initRendering();
testcube = new Object("apex.3ds");
testcube->CreateVBO();
mainLoop();
return 0;
}
void initRendering()
{
glClearDepth(1);
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE); //face culling
glEnable(GL_LIGHTING); //enable lighting
glEnable(GL_LIGHT0);
glEnable(GL_LIGHT1);
glEnable(GL_NORMALIZE); // auto normalize normals
glShadeModel(GL_SMOOTH); //enable smooth shading
}
void mainLoop(void)
{
// the time of the previous frame
double old_time = glfwGetTime();
// this just loops as long as the program runs
while(1)
{
// escape to quit, arrow keys to rotate view
if (glfwGetKey(GLFW_KEY_ESC) == GLFW_PRESS)
break;
// draw the figure
drawScene();
// swap back and front buffers
glfwSwapBuffers();
}
}
// Renders one frame: clears the buffers, positions the camera, and
// draws the loaded model. Presenting the frame (buffer swap) is the
// caller's job — see mainLoop().
void drawScene()
{
    // clear info from last draw
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glMatrixMode(GL_MODELVIEW); // switch drawing perspective
    glLoadIdentity();           // reset drawing perspective
    glTranslatef(0.0f, 0.0f, -5.0f); // translate camera back so the model is in view

    // ADD SCENE OBJECTS TO RENDER HERE
    testcube->Draw();

    // BUG FIX: the glfwSwapBuffers() call that used to live here was
    // removed — mainLoop() already swaps once per frame, and swapping
    // twice per frame presents a half-finished back buffer.
}
// (also removed the stray ';' after the closing brace)
// Emergency teardown used by InitializeWindow() when GLFW/window
// creation fails: terminate GLFW, release the model, and exit.
void shutdown()
{
glfwTerminate();
delete testcube; // safe even before loading: globals are zero-initialized, delete NULL is a no-op
exit(1); // NOTE(review): always exits with failure status — fine for the error path, confirm if reused for normal quits
}
main.h:
//Include STD headers
#include <GL/glew.h>
#include <GL/glfw.h>
#include <algorithm>
#include <cstdio>
#include <fstream>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <math.h>
//Include GLM
#include <glm/glm.hpp>
using namespace glm;
extern void InitializeWindow();
extern void shutdown();
initialize.cpp
#include "main.h"
void shutdown();
void InitializeWindow()
{
const int window_width = 800,
window_height = 600;
if (glfwInit() != GL_TRUE)
shutdown();
// 800 x 600, 16 bit color, no depth, alpha or stencil buffers, windowed
if (glfwOpenWindow(window_width, window_height, 5, 6, 5,
0, 0, 0, GLFW_WINDOW) != GL_TRUE)
shutdown();
glfwSetWindowTitle("Pheonix engine R1");
glewInit();
// set the projection matrix to a normal frustum with a max depth of 50
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
float aspect_ratio = ((float)window_height) / window_width;
glFrustum(.5, -.5, -.5 * aspect_ratio, .5 * aspect_ratio, 1, 50);
glMatrixMode(GL_MODELVIEW);
}
3dsloader.h
#include "main.h"
#include "lib3ds/file.h"
#include "lib3ds/mesh.h"
// A renderable 3DS model: loads geometry with lib3ds, uploads it into
// three VBOs (vertices, normals, texture coordinates), and draws it
// through the fixed-function client-state API.
class Object
{
public:
// Loads `filename` with lib3ds; throws if the file cannot be loaded.
Object(std:: string filename);
virtual ~Object();
// Issues the draw call from the VBOs. Requires CreateVBO() first.
virtual void Draw() const;
// Copies the lib3ds data into GL buffer objects; frees the lib3ds
// file when done. Must be called exactly once, before Draw().
virtual void CreateVBO();
protected:
// Sums the face count of every mesh into m_TotalFaces.
void GetFaces();
unsigned int m_TotalFaces; // total triangle count across all meshes
Lib3dsFile * m_model; // owned; CreateVBO() frees it and sets it to NULL
GLuint m_VertexVBO, m_NormalVBO, m_TexCoordVBO; // GL buffer object ids
};
3dsloader.cpp
#include "3dsloader.h"
// Loads the .3ds file named by `filename` via lib3ds.
// Throws std::runtime_error if the file cannot be loaded.
Object::Object(std:: string filename)
{
    m_TotalFaces = 0;
    // Zero the buffer ids so later code can tell CreateVBO() has not
    // run yet (the original left them uninitialized).
    m_VertexVBO = m_NormalVBO = m_TexCoordVBO = 0;
    m_model = lib3ds_file_load(filename.c_str());
    // If loading the model failed, we throw an exception.
    if(!m_model)
    {
        // BUG FIX: the original did
        //   throw strcat("Unable to load ", filename.c_str());
        // strcat APPENDS INTO its first argument — here a read-only
        // string literal — which is undefined behavior and typically
        // crashes (a prime suspect for the reported assertion).
        // Throw a proper exception object instead.
        throw std::runtime_error("Unable to load " + filename);
    }
}
// Releases the lib3ds file if it is still owned. CreateVBO() normally
// frees it and sets m_model to NULL; this covers the path where an
// Object is destroyed before (or without) CreateVBO() being called,
// which leaked the whole file in the original.
Object::~Object()
{
    if (m_model)
    {
        lib3ds_file_free(m_model);
        m_model = NULL;
    }
}
void Object::GetFaces()
{
m_TotalFaces = 0;
Lib3dsMesh * mesh;
// Loop through every mesh.
for(mesh = m_model->meshes;mesh != NULL;mesh = mesh->next)
{
// Add the number of faces this mesh has to the total number of faces.
m_TotalFaces += mesh->faces;
}
}
void Object::CreateVBO()
{
// Calculate the number of faces we have in total.
GetFaces();
// Allocate memory for our vertices, normals and texture-coordinates.
Lib3dsVector * vertices = new Lib3dsVector[m_TotalFaces * 3];
Lib3dsVector * normals = new Lib3dsVector[m_TotalFaces * 3];
Lib3dsVector * texCoords = new Lib3dsVector[m_TotalFaces * 2];
Lib3dsMesh * mesh;
unsigned int FinishedFaces = 0;
// Loop through all the meshes.
for(mesh = m_model->meshes;mesh != NULL;mesh = mesh->next)
{
lib3ds_mesh_calculate_normals(mesh, &normals[FinishedFaces*3]);
// Loop through every face.
for(unsigned int cur_face = 0; cur_face < mesh->faces;cur_face++)
{
Lib3dsFace * face = &mesh->faceL[cur_face];
for(unsigned int i = 0;i < 3;i++)
{
// If there are texture-coordinates.
if(mesh->texels)
{
memcpy(&texCoords[FinishedFaces*2 + i], mesh->texelL[face->points[ i ]], sizeof(Lib3dsTexel));
}
memcpy(&vertices[FinishedFaces*3 + i], mesh->pointL[face->points[ i ]].pos, sizeof(Lib3dsVector));
}
FinishedFaces++;
}
}
// Generate a VBO and store it with our vertices.
glGenBuffers(1, &m_VertexVBO);
glBindBuffer(GL_ARRAY_BUFFER, m_VertexVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Lib3dsVector) * 3 * m_TotalFaces, vertices, GL_STATIC_DRAW);
// Generate another VBO and store the normals in it.
glGenBuffers(1, &m_NormalVBO);
glBindBuffer(GL_ARRAY_BUFFER, m_NormalVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Lib3dsVector) * 3 * m_TotalFaces, normals, GL_STATIC_DRAW);
// Generate a third VBO and store the texture coordinates in it.
glGenBuffers(1, &m_TexCoordVBO);
glBindBuffer(GL_ARRAY_BUFFER, m_TexCoordVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Lib3dsTexel) * 3 * m_TotalFaces, texCoords, GL_STATIC_DRAW);
// Clean up our allocated memory because the data is now stored in the GPU.
delete vertices;
delete normals;
delete texCoords;
// We no longer need lib3ds.
lib3ds_file_free(m_model);
m_model = NULL;
}
// Renders the model as GL_TRIANGLES sourced from the three VBOs.
// Precondition: CreateVBO() has been called, so the buffer ids are
// valid and each buffer holds 3 entries per face.
void Object:: Draw() const
{
// Enable vertex, normal and texture-coordinate arrays.
// NOTE(review): the texcoord array is enabled even when the model had
// no texels (CreateVBO leaves that buffer's contents unfilled in that
// case) — confirm this is intended.
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
// Bind the VBO with the normals.
glBindBuffer(GL_ARRAY_BUFFER, m_NormalVBO);
// A NULL pointer means "read from the currently bound VBO at offset 0".
glNormalPointer(GL_FLOAT, 0, NULL);
glBindBuffer(GL_ARRAY_BUFFER, m_TexCoordVBO);
glTexCoordPointer(2, GL_FLOAT, 0, NULL);
glBindBuffer(GL_ARRAY_BUFFER, m_VertexVBO);
glVertexPointer(3, GL_FLOAT, 0, NULL);
// Render the triangles: 3 vertices per face.
glDrawArrays(GL_TRIANGLES, 0, m_TotalFaces * 3);
// Restore client state so later draws aren't affected.
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
I have also attached a screenshot of the error.
Thanks in advance for your help,
Adam