void main() {
	// Per-vertex data layout (set up by the C++ side):
	//   gl_Vertex.xyz        = ball centre, identical for all four corners of the quad
	//   gl_MultiTexCoord0.xy = signed corner offset, each component is +/- radius
	vec4 centre = vec4(gl_Vertex.xyz, 1.0);

	// Billboard: transform the centre to eye space, then push the vertex out
	// by the corner offset in the eye-space XY plane so the quad always faces
	// the camera.
	vec4 eyePos = gl_ModelViewMatrix * centre;
	eyePos.xy += gl_MultiTexCoord0.xy;
	gl_Position = gl_ProjectionMatrix * eyePos;

	// Recover the radius from the corner offset (components are +/- radius,
	// so the absolute value of either one is the radius).
	float radius = abs(gl_MultiTexCoord0.x);

	// Normalise the (negated) corner offset to [-1, 1], then scale/bias it
	// into the [0, 1] texture-coordinate range.
	vec2 corner = -gl_MultiTexCoord0.xy / radius;
	gl_TexCoord[0].xy = corner * 0.5 + 0.5;
}
For each ball I store the center in the vertex position attribute (duplicated — each of the quad's four vertices gets its own copy), and the radius and corner offset in the texture coordinate:
for(boards_t::const_iterator i=boards.begin(); i!=boards.end(); ++i) {
vbo.push_back(tex_vec_t(vec2_t(-i->radius, i->radius),i->center));
vbo.push_back(tex_vec_t(vec2_t(-i->radius,-i->radius),i->center));
vbo.push_back(tex_vec_t(vec2_t( i->radius,-i->radius),i->center));
vbo.push_back(tex_vec_t(vec2_t( i->radius, i->radius),i->center));
}
The VBO is then an interleaved array in GL_T2F_V3F format.
Is there a more speed-efficient — or more space-efficient yet not massively slower — approach?
My vertex shader knows the center and radius of the sphere, so presumably I can pass both on to the fragment shader as varyings; interpolation won't alter the values, since all four vertices of each ball's quad carry identical copies.
Can I somehow use this to produce proper per-fragment spherical depth and normals in the fragment shader? Should I look them up from separate normal and depth textures, or is it cheaper to compute them in the shader — and what would the math be?