Using OpenGL with GLSL shaders.
It looks like the model is loaded fine, but when I try to render it with the texture, I receive something horrible:
Here is model with texture coordinates interpreted as colors:
Here is my code for loading texture (gDebugger shows that texture is loaded normally):
glGenTextures(1,&textureId);
FREE_IMAGE_FORMAT fif = FreeImage_GetFileType(filename);
FIBITMAP* fib = FreeImage_Load(fif,filename,0);
int width = FreeImage_GetWidth(fib);
int height = FreeImage_GetHeight(fib);
char* pixels = (char*)FreeImage_GetBits(fib);
glBindTexture(GL_TEXTURE_2D,textureId);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,width,height,0,GL_BGR_EXT,GL_UNSIGNED_BYTE,pixels);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
FreeImage_Unload(fib);
Here is my code for loading model (I work with GLM library):
// Flatten the lib3ds model into per-face-vertex arrays (3 corners per
// face, no index buffer) and upload them into three VBOs.
// Layout: for face f, corner c lives at slot f*3 + c in ALL three arrays.
glm::vec3 *vertices  = new glm::vec3[m_TotalFaces * 3];
glm::vec3 *normals   = new glm::vec3[m_TotalFaces * 3];
glm::vec2 *texCoords = new glm::vec2[m_TotalFaces * 3];
Lib3dsMesh * mesh;
unsigned int FinishedFaces = 0;
// Loop through all the meshes
for (int i = 0; i < m_model->nmeshes; i++)
{
    // BUG FIX: the original assigned the mesh list head on every iteration,
    // so only the first mesh was ever processed; index by i instead.
    mesh = m_model->meshes[i];
    lib3ds_mesh_calculate_face_normals(mesh, (float (*)[3])&normals[FinishedFaces * 3][0]);
    // Loop through every face of this mesh
    for (unsigned int cur_face = 0; cur_face < mesh->nfaces; cur_face++)
    {
        Lib3dsFace * face = &mesh->faces[cur_face];
        for (unsigned int corner = 0; corner < 3; corner++) // renamed: no longer shadows outer i
        {
            memcpy(&vertices[FinishedFaces * 3 + corner][0],
                   mesh->vertices[face->index[corner]], sizeof(float) * 3);
            // BUG FIX: texCoords must use the same stride as vertices
            // (3 slots per face). The original used FinishedFaces*2 + i,
            // which writes overlapping/out-of-order slots, so vertices and
            // their texture coordinates fall out of sync — exactly the
            // garbled texturing seen when rendering.
            memcpy(&texCoords[FinishedFaces * 3 + corner][0],
                   mesh->texcos[face->index[corner]], sizeof(float) * 2);
        }
        FinishedFaces++;
    }
}
// Generate a Vertex Buffer Object and store it with our vertices
glf_->glGenBuffers(1, &m_VertexVBO);
glf_->glBindBuffer(GL_ARRAY_BUFFER, m_VertexVBO);
glf_->glBufferData(GL_ARRAY_BUFFER, (sizeof(float) * 3) * 3 * m_TotalFaces, &vertices[0], GL_STATIC_DRAW);
// Generate another Vertex Buffer Object and store the normals in it
glf_->glGenBuffers(1, &m_NormalVBO);
glf_->glBindBuffer(GL_ARRAY_BUFFER, m_NormalVBO);
glf_->glBufferData(GL_ARRAY_BUFFER, (sizeof(float) * 3) * 3 * m_TotalFaces, &normals[0], GL_STATIC_DRAW);
// Generate Vertex Buffer Object and store texture coordinates in it
glf_->glGenBuffers(1, &m_TexcoorVBO);
glf_->glBindBuffer(GL_ARRAY_BUFFER, m_TexcoorVBO);
glf_->glBufferData(GL_ARRAY_BUFFER, (sizeof(float) * 2) * 3 * m_TotalFaces, &texCoords[0], GL_STATIC_DRAW);
// Clean up our allocated memory
delete[] vertices;
delete[] normals;
delete[] texCoords;
gDebugger shows that all texture coordinates in VBO belong to interval [0.0, 1.0]
Here is my vertex shader:
#version 330
// Passthrough vertex shader: transforms the position by the combined
// model-view-projection matrix and forwards the texture coordinate
// to the fragment stage unchanged.
in vec3 position;   // object-space vertex position (from m_VertexVBO)
in vec2 texcord;    // UV texture coordinate (from m_TexcoorVBO)
out vec2 o_texcord; // interpolated UV, consumed by the fragment shader
uniform mat4 MVP;   // projection * view * model
void main()
{
gl_Position = MVP * vec4(position,1.0);
o_texcord = texcord;
}
Here is my fragment shader:
#version 330
// Samples the bound 2D texture at the interpolated UV and writes the
// texel straight to the framebuffer.
in vec2 o_texcord;          // UV from the vertex shader
out vec4 fragmentColor;     // final fragment color
uniform sampler2D textureMy; // texture unit the model texture is bound to
void main()
{
    // BUG FIX: texture2D() was removed from the core profile in GLSL 1.30;
    // under "#version 330" (core) the shader fails to compile. Use the
    // overloaded texture() instead.
    fragmentColor = texture(textureMy, o_texcord);
}
In 3ds max model looks fine:
What am I doing wrong?