I'm having trouble getting multitexturing to work under GLSL. I've never done it using the fixed-function pipeline, so perhaps I lack the background to see what I'm doing wrong, but from what I've read I think I'm doing it properly.
Here is the code which sets up my shader program and (for the time being) passes in the multitexturing variables:
// Builds, links and activates the shader program, then loads both
// terrain textures and wires them to the sampler uniforms.
//
// BUG FIX (the reported "second texture always wins" symptom):
// the original code did  load grass -> Bind(0) -> load rock -> Bind(1).
// The Texture constructor presumably calls glBindTexture on whichever
// texture unit is currently active while uploading the image
// (TODO confirm in Texture's ctor). After Bind(0), unit 0 was still
// active, so constructing the rock texture re-bound unit 0 to rock,
// clobbering grass. Loading BOTH textures before binding either of
// them to a unit avoids that.
void Shader::CreateProgram()
{
    m_Program = glCreateProgram();
    CreateVertexShader();
    CreateFragmentShader();
    glLinkProgram(m_Program);

    // The program must be current before glUniform1i below may be called.
    glUseProgram(m_Program);

    // Load both textures first (see BUG FIX note above).
    m_Texture0 = new Texture("Data\\Textures\\grass.bmp");
    m_Texture1 = new Texture("Data\\Textures\\rock.bmp");

    // Attach grass to unit 0 and tell the sampler uniform to read unit 0.
    m_Texture0->Bind(0);
    int texture0Location = glGetUniformLocation(m_Program, "texture0");
    glUniform1i(texture0Location, 0);

    // Attach rock to unit 1 likewise.
    m_Texture1->Bind(1);
    int texture1Location = glGetUniformLocation(m_Program, "texture1");
    glUniform1i(texture1Location, 1);
}
And Texture::Bind looks like this:
// Selects the given texture unit and attaches this texture object to it.
// textureIndex: zero-based unit number (0 -> GL_TEXTURE0, 1 -> GL_TEXTURE1, ...).
// Does nothing if m_ID does not name a valid texture object.
void Texture::Bind(const int& textureIndex) const
{
    // glBindTexture operates on the active unit, so select the unit first.
    glActiveTexture(GL_TEXTURE0 + textureIndex);

    if(!glIsTexture(m_ID))
        return;

    glBindTexture(GL_TEXTURE_2D, m_ID);
}
So I'm setting the active texture unit, binding my texture object (by texture ID) to that active unit, and passing in the texture unit number as a uniform to my shaders, which look like this:
Vertex (the 3rd and 4th lines from the bottom should be the only relevant ones):
void main()
{
    // Bring position and normal into eye space for lighting.
    vec3 posEye    = (gl_ModelViewMatrix * gl_Vertex).xyz;
    vec3 normalEye = normalize(gl_NormalMatrix * gl_Normal);

    // Lambertian diffuse term for light 0.
    vec3  toLight = normalize(gl_LightSource[0].position.xyz - posEye);
    float lambert = max(dot(normalEye, toLight), 0.0);
    vec4  diffuse = lambert *
                    gl_FrontMaterial.diffuse *
                    gl_LightSource[0].diffuse;

    vec4 ambient = gl_FrontMaterial.ambient *
                   (gl_LightSource[0].ambient + gl_LightModel.ambient);

    // Pass both texture-coordinate sets through to the fragment stage.
    gl_TexCoord[0] = gl_MultiTexCoord0;
    gl_TexCoord[1] = gl_MultiTexCoord1;

    gl_FrontColor = gl_Color * diffuse + ambient;
    gl_Position   = ftransform();
}
Fragment:
uniform sampler2D texture0; // expected: grass, bound to texture unit 0
uniform sampler2D texture1; // expected: rock, bound to texture unit 1
                            // (unused below, so the compiler may optimize it
                            // away and glGetUniformLocation may return -1)

void main()
{
    // Sample only the first texture for now, modulated by the
    // interpolated vertex lighting color.
    vec4 texel = texture2D(texture0, gl_TexCoord[0].st);
    gl_FragColor = texel * gl_Color;
}
You'll notice that at the moment I'm only dealing with a single texture at a time (I wanted to get that up and running before I started blending textures). However, both of the two following lines of shader code:
vec4 fragColor = texture2D(texture0, gl_TexCoord[0].st);
vec4 fragColor = texture2D(texture1, gl_TexCoord[1].st);
use the second texture (the rock). The way I've set it up, I'd expect the first line to use the grass and the second to use the rock.
I'm not sure if it's important, but here's a sample of how I bind the textures to a vertex:
// Immediate-mode submission of one vertex (face A): normal, both sets of
// texture coordinates, then the position.
// NOTE(review): only the X component of the normal is negated here
// (-GetX() but +GetY()/+GetZ()) — confirm this asymmetry is intentional.
glNormal3f(-m_Faces.A->normal.GetX(), m_Faces.A->normal.GetY(), m_Faces.A->normal.GetZ());
// These become gl_MultiTexCoord0 / gl_MultiTexCoord1 in the vertex shader.
glMultiTexCoord2fARB(GL_TEXTURE0_ARB, 0.0f, 1.0f);
glMultiTexCoord2fARB(GL_TEXTURE1_ARB, 0.0f, 1.0f);
glVertex3f(m_Faces.A->pos.GetX(), m_Faces.A->pos.GetY(), m_Faces.A->pos.GetZ());
Can anyone see what it is that I'm doing wrong? From what I've read, that should be it, or at least close. But no matter what I do, that second texture always gets used.
Thanks in advance.
Without order nothing can exist - without chaos nothing can evolve.