[Help] Vertex pos/normal/coord
[color=#000000][font=Arial,]I'm having trouble sending both normals and a u,v pair to my shaders. If I remove the normal, things work as expected.[/font]
[color=#000000][font=Arial,]
It appears the v_normal is receiving the values that are intended for v_coord. I still have no idea though.[/font][color=#000000][font=Arial,]
I've spent a lot of time trying to figure out, but I really am not able to pick a sane set of actions to debug/experiment anymore.[/font][color=#000000][font=Arial,]
This is my vertex:[/font]
[source lang="cpp"]struct Vertex{
// Interleaved vertex layout: 3 floats position, 3 floats normal, 2 floats UV.
// NOTE(review): the non-trivial constructor does not by itself prevent a
// contiguous layout, but any padding inside vec3/real would break the
// hand-computed attribute offsets below — prefer offsetof() when pointing
// glVertexAttribPointer at these members.
Vertex(vec3 const & v) : pos(v) {}
vec3 pos;      // object-space position (bound to a_position)
vec3 normal;   // vertex normal (bound to a_normal)
real u, v;     // texture coordinates (bound to a_coord)
};
[/source]
[source lang="cpp"] // Attribute locations and per-attribute component counts.
// Locations bound with glBindAttribLocation only take effect at the NEXT
// glLinkProgram call; otherwise query them with glGetAttribLocation after
// linking.
const int VERTEX_POS_INDX = 0;
const int VERTEX_NORMAL_INDX = 1;
const int VERTEX_TEXCOORD_INDX = 2;
const int VERTEX_POS_SIZE = 3;
const int VERTEX_NORMAL_SIZE = 3;
const int VERTEX_TEXCOORD_SIZE = 2;
GLuint vbo, ibo;
// Upload interleaved vertex data and 16-bit indices.
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sphere->vertices.size()*sizeof(Vertex), &sphere->vertices[0], GL_STATIC_DRAW);
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sphere->indices.size()*sizeof(unsigned short), &sphere->indices[0], GL_STATIC_DRAW);
glEnableVertexAttribArray ( VERTEX_POS_INDX );
glEnableVertexAttribArray ( VERTEX_NORMAL_INDX );
glEnableVertexAttribArray ( VERTEX_TEXCOORD_INDX );
// Use offsetof() so the offsets follow the real struct layout (including any
// padding) instead of hand-accumulating VERTEX_*_SIZE * sizeof(real).
glVertexAttribPointer ( VERTEX_POS_INDX, VERTEX_POS_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, pos) );
glVertexAttribPointer ( VERTEX_NORMAL_INDX, VERTEX_NORMAL_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, normal) );
// BUG FIX: the original passed VERTEX_TEXCOORD_INDX (the attribute location)
// as the second argument; it must be VERTEX_TEXCOORD_SIZE (component count).
glVertexAttribPointer ( VERTEX_TEXCOORD_INDX, VERTEX_TEXCOORD_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, u) );
// Must be followed by glLinkProgram(programObject) for these to take effect.
glBindAttribLocation ( programObject, VERTEX_POS_INDX, "a_position" );
glBindAttribLocation ( programObject, VERTEX_NORMAL_INDX, "a_normal" );
glBindAttribLocation ( programObject, VERTEX_TEXCOORD_INDX, "a_coord" );
[/source]
[source lang="cpp"]precision highp float;
// Vertex shader: passes UV and normal through, transforms position by MVP.
uniform mat4 u_mv;
uniform mat4 u_mvp;
uniform vec3 u_light;
uniform vec3 u_up;
attribute vec3 a_position; // location 0 (glBindAttribLocation)
attribute vec2 a_coord;    // location 2
attribute vec3 a_normal;   // location 1
varying vec2 v_coord;
varying vec3 v_normal;
void main() {
// Plain pass-through; normal is NOT transformed to eye space here.
v_coord = a_coord;
v_normal = a_normal;
gl_Position = u_mvp * vec4(a_position, 1);
}
[/source]
[source lang="cpp"]precision highp float;
// Fragment shader: debug visualization — tiled UV in red/green, normal.z
// term zeroed out in blue.
// BUG FIX: the precision/uniform/varying declarations were duplicated in the
// original; redeclaration is a compile error in GLSL ES.
uniform vec3 u_up;
varying vec3 v_normal;
varying vec2 v_coord;
void main()
{
vec2 coord = v_coord;
vec3 normal = v_normal;
// Tile the UVs 5x and wrap into [0,1).
coord.x = mod(v_coord.x * 5.0, 1.0);
coord.y = mod(v_coord.y * 5.0, 1.0);
gl_FragColor = vec4 (
mod(coord.x*1.0,1.0),
mod(coord.y*1.0,1.0),
mod(normal.z*5.0,1.0)*0.0, // multiplied by 0.0: blue channel disabled for debugging
1.0 );
}
[/source]
[color=#000000][font=Arial,]
It appears the v_normal is receiving the values that are intended for v_coord. I still have no idea though.[/font][color=#000000][font=Arial,]
I've spent a lot of time trying to figure out, but I really am not able to pick a sane set of actions to debug/experiment anymore.[/font][color=#000000][font=Arial,]
This is my vertex:[/font]
[source lang="cpp"]struct Vertex{
// Interleaved vertex layout: 3 floats position, 3 floats normal, 2 floats UV.
// NOTE(review): the non-trivial constructor does not by itself prevent a
// contiguous layout, but any padding inside vec3/real would break the
// hand-computed attribute offsets below — prefer offsetof() when pointing
// glVertexAttribPointer at these members.
Vertex(vec3 const & v) : pos(v) {}
vec3 pos;      // object-space position (bound to a_position)
vec3 normal;   // vertex normal (bound to a_normal)
real u, v;     // texture coordinates (bound to a_coord)
};
[/source]
[source lang="cpp"] // Attribute locations and per-attribute component counts.
// Locations bound with glBindAttribLocation only take effect at the NEXT
// glLinkProgram call; otherwise query them with glGetAttribLocation after
// linking.
const int VERTEX_POS_INDX = 0;
const int VERTEX_NORMAL_INDX = 1;
const int VERTEX_TEXCOORD_INDX = 2;
const int VERTEX_POS_SIZE = 3;
const int VERTEX_NORMAL_SIZE = 3;
const int VERTEX_TEXCOORD_SIZE = 2;
GLuint vbo, ibo;
// Upload interleaved vertex data and 16-bit indices.
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sphere->vertices.size()*sizeof(Vertex), &sphere->vertices[0], GL_STATIC_DRAW);
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sphere->indices.size()*sizeof(unsigned short), &sphere->indices[0], GL_STATIC_DRAW);
glEnableVertexAttribArray ( VERTEX_POS_INDX );
glEnableVertexAttribArray ( VERTEX_NORMAL_INDX );
glEnableVertexAttribArray ( VERTEX_TEXCOORD_INDX );
// Use offsetof() so the offsets follow the real struct layout (including any
// padding) instead of hand-accumulating VERTEX_*_SIZE * sizeof(real).
glVertexAttribPointer ( VERTEX_POS_INDX, VERTEX_POS_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, pos) );
glVertexAttribPointer ( VERTEX_NORMAL_INDX, VERTEX_NORMAL_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, normal) );
// BUG FIX: the original passed VERTEX_TEXCOORD_INDX (the attribute location)
// as the second argument; it must be VERTEX_TEXCOORD_SIZE (component count).
glVertexAttribPointer ( VERTEX_TEXCOORD_INDX, VERTEX_TEXCOORD_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, u) );
// Must be followed by glLinkProgram(programObject) for these to take effect.
glBindAttribLocation ( programObject, VERTEX_POS_INDX, "a_position" );
glBindAttribLocation ( programObject, VERTEX_NORMAL_INDX, "a_normal" );
glBindAttribLocation ( programObject, VERTEX_TEXCOORD_INDX, "a_coord" );
[/source]
[source lang="cpp"]precision highp float;
// Vertex shader: passes UV and normal through, transforms position by MVP.
uniform mat4 u_mv;
uniform mat4 u_mvp;
uniform vec3 u_light;
uniform vec3 u_up;
attribute vec3 a_position; // location 0 (glBindAttribLocation)
attribute vec2 a_coord;    // location 2
attribute vec3 a_normal;   // location 1
varying vec2 v_coord;
varying vec3 v_normal;
void main() {
// Plain pass-through; normal is NOT transformed to eye space here.
v_coord = a_coord;
v_normal = a_normal;
gl_Position = u_mvp * vec4(a_position, 1);
}
[/source]
[source lang="cpp"]precision highp float;
// Fragment shader: debug visualization — tiled UV in red/green, normal.z
// term zeroed out in blue.
// BUG FIX: the precision/uniform/varying declarations were duplicated in the
// original; redeclaration is a compile error in GLSL ES.
uniform vec3 u_up;
varying vec3 v_normal;
varying vec2 v_coord;
void main()
{
vec2 coord = v_coord;
vec3 normal = v_normal;
// Tile the UVs 5x and wrap into [0,1).
coord.x = mod(v_coord.x * 5.0, 1.0);
coord.y = mod(v_coord.y * 5.0, 1.0);
gl_FragColor = vec4 (
mod(coord.x*1.0,1.0),
mod(coord.y*1.0,1.0),
mod(normal.z*5.0,1.0)*0.0, // multiplied by 0.0: blue channel disabled for debugging
1.0 );
}
[/source]
Your Vertex struct is not a plain-old-data (POD) type, so you can't assume that
offset += VERTEX_POS_SIZE * sizeof(real); [font=arial,helvetica,sans-serif]and the other will be the offset you expect.[/font]
offset += VERTEX_POS_SIZE * sizeof(real); [font=arial,helvetica,sans-serif]and the other will be the offset you expect.[/font]
A check: you are doing the linking (glLinkProgram) of the shader program after the call to glBindAttribLocation(), aren't you?
I usually do the glVertexAttribPointer offset as follows:
[color=black][font=Consolas,]struct Vertex *p = 0;[/font]
// Null-pointer member-access trick to derive per-member byte offsets.
glVertexAttribPointer ( VERTEX_POS_INDX, VERTEX_POS_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), &p->pos);
glVertexAttribPointer ( VERTEX_NORMAL_INDX, VERTEX_NORMAL_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), &p->normal );
// FIX: second argument must be the component count (VERTEX_TEXCOORD_SIZE),
// not the attribute location (VERTEX_TEXCOORD_INDX) as originally quoted.
glVertexAttribPointer ( VERTEX_TEXCOORD_INDX, VERTEX_TEXCOORD_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), &p->u );
I usually do the glVertexAttribPointer offset as follows:
[color=black][font=Consolas,]struct Vertex *p = 0;[/font]
// Null-pointer member-access trick to derive per-member byte offsets.
glVertexAttribPointer ( VERTEX_POS_INDX, VERTEX_POS_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), &p->pos);
glVertexAttribPointer ( VERTEX_NORMAL_INDX, VERTEX_NORMAL_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), &p->normal );
// FIX: second argument must be the component count (VERTEX_TEXCOORD_SIZE),
// not the attribute location (VERTEX_TEXCOORD_INDX) as originally quoted.
glVertexAttribPointer ( VERTEX_TEXCOORD_INDX, VERTEX_TEXCOORD_SIZE, GL_FLOAT, GL_FALSE, sizeof(Vertex), &p->u );
Better again to use the GL3.x+ syntax (if available in your implementation) for this in your shader:
layout(location = 0) in vec4 position;
@szecs: sizeof(Vertex) returns 32, which is 8*sizeof(float).
@larspensjo: I tried both tricks (which are really nifty) but to no avail.
@mhagain: I am targeting opengl es 2 on iPhone, so OGL3 is unfortunately not an option.
@larspensjo: I tried both tricks (which are really nifty) but to no avail.
@mhagain: I am targeting opengl es 2 on iPhone, so OGL3 is unfortunately not an option.
Got it solved!
I just had to use glGetAttribLocation to get the identifiers from the linked shader program.
I just had to use glGetAttribLocation to get the identifiers from the linked shader program.
Got it solved!
I just had to use glGetAttribLocation to get the identifiers from the linked shader program.
Nice to see! But that means that the call to [color=#282828][font=helvetica, arial, verdana, tahoma, sans-serif]
[background=rgb(250, 251, 252)]glBindAttribLocation() wasn't done before the linkage phase, or the identifiers would have been the one you specified.[/background]
[/font][color=#282828][font=helvetica, arial, verdana, tahoma, sans-serif]
[background=rgb(250, 251, 252)]If you are using the same VAO for more than one program, you can't use [/background]
[/font]glGetAttribLocation() as the results may be different.
@larspensjo: Thanks, got it!
[color=#282828][font=helvetica, arial, verdana, tahoma, sans-serif][background=rgb(250, 251, 252)]A check: you are doing the linking (glLinkProgram) of the shader program after the call to glBindAttribLocation(), aren't you?[/quote][/background]
[/font]
[background=rgb(250, 251, 252)][font="helvetica, arial, verdana, tahoma, sans-serif"][size="2"][color="#282828"]I thought you were warning me of a pitfall rather than reminding me to do the right thing.[/font][/background]
This topic is closed to new replies.
Advertisement
Popular Topics
Advertisement