My GLSL shader in my game engine has a very odd bug that my roomate found today. On his AMD graphics card any model that is skinned/animated is invisible. The models show up fine on my Nvidia card so this is ATI specific. The shaders compile fine on his card and there are no errors in the AMD GPU shader analyzer which is very odd.
We messed with this for a few hours and I found the block of code that causes the invisible models glitch. It's the part of the GLSL vertex shader that does the rotation for skinned meshes. Something here IS causing the problem. If I comment this out everything renders fine but nothing rotates obviously.
Here are all my uniforms, attributes, and varyings in the vertex shader:
uniform bool bRotation;
uniform vec4 modelPosition;
uniform vec4 modelRotation;
uniform float modelScale;
uniform bool bSkin;
uniform vec3 bonesOffset[64];
uniform vec3 bonesPos[64];
uniform vec4 bonesRot[64];
uniform vec4 sunLocation;
uniform vec3 sunDir;
varying vec3 halfVec;
varying vec3 eyeVec;
varying vec3 lightVec;
varying vec4 position;
varying vec3 normal;
attribute vec3 indices;
attribute vec3 weights;
attribute vec3 tangent;
varying vec4 nearShadowCoord;
varying vec4 farShadowCoord;
Here is the part of the vertex shader that is causing the problem:
//do rotation
i = 0;
vec4 v = pos;
vec4 rot[3];
vec4 nor[3];
while(i < 3) {
vec4 r = bonesRot[int(indices[i])];
rot[i] = vec4(quatRotation(vec3(v.x, v.y, v.z), r), 0.0f);
nor[i] = vec4(quatRotation(normal, r), 0.0f);
i++;
}
//Average the rotations by weight and apply them
i = 0;
vec4 final;
normal = vec3(0.0f,0.0f,0.0f);
while(i < 3) {
final.x += (rot[i].x * weights[i]);
final.y += (rot[i].y * weights[i]);
final.z += (rot[i].z * weights[i]);
normal.x += (nor[i].x * weights[i]);
normal.y += (nor[i].y * weights[i]);
normal.z += (nor[i].z * weights[i]);
i++;
}
pos = final;
And here is the quatRotation() function that is used in the above code on lines 8 and 9:
vec3 quatRotation(vec3 v, vec4 r) {
float q00 = 2.0f * r.x * r.x;
float q11 = 2.0f * r.y * r.y;
float q22 = 2.0f * r.z * r.z;
float q01 = 2.0f * r.x * r.y;
float q02 = 2.0f * r.x * r.z;
float q03 = 2.0f * r.x * r.w;
float q12 = 2.0f * r.y * r.z;
float q13 = 2.0f * r.y * r.w;
float q23 = 2.0f * r.z * r.w;
vec3 f = vec3(0.0f,0.0f,0.0f);
f.x = (1.0f - q11 - q22) * v.x + (q01 - q23) * v.y + (q02 + q13) * v.z;
f.y = (q01 + q23) * v.x + (1.0f - q22 - q00) * v.y + (q12 - q03) * v.z;
f.z = (q02 - q13) * v.x + (q12 + q03) * v.y + (1.0f - q11 - q00) * v.z;
return f;
}
I have no idea what's actually wrong with my code. I develop on Nvidia and everything works great. I'm stumped D: