
What am I doing wrong with these vertex attributes?



My shader has the following attributes:

 

layout(location = 0) in float pos_x;
layout(location = 1) in float pos_y;
layout(location = 2) in float rotation;
layout(location = 3) in vec2 scale;
layout(location = 4) in uint TextureID;

 

And the GL code is as follows:

 

glNamedBufferStorage(buffers[0], sprite_count * sizeof(float), sprites.position_x.data(), GL_DYNAMIC_STORAGE_BIT);
glNamedBufferStorage(buffers[1], sprite_count * sizeof(float), sprites.position_y.data(), GL_DYNAMIC_STORAGE_BIT);
glNamedBufferStorage(buffers[2], sprite_count * sizeof(float), sprites.rotation.data(), GL_DYNAMIC_STORAGE_BIT);
glNamedBufferStorage(buffers[3], sprite_count * sizeof(glm::vec2), sprites.scale.data(), GL_DYNAMIC_STORAGE_BIT);
glNamedBufferStorage(buffers[4], sprite_count * sizeof(unsigned int), sprites.texid.data(), GL_DYNAMIC_STORAGE_BIT);
 
glEnableVertexArrayAttrib(vao, 0);
glVertexArrayVertexBuffer(vao, 0, buffers[0], 0, sizeof(float));
glVertexArrayBindingDivisor(vao, 0, 1);
glVertexArrayAttribFormat(vao, 0, 1, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 0, 0);
 
glEnableVertexArrayAttrib(vao, 1);
glVertexArrayVertexBuffer(vao, 1, buffers[1], 0, sizeof(float));
glVertexArrayBindingDivisor(vao, 1, 1);
glVertexArrayAttribFormat(vao, 0, 1, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 1, 1);
 
glEnableVertexArrayAttrib(vao, 2);
glVertexArrayVertexBuffer(vao, 2, buffers[2], 0, sizeof(float));
glVertexArrayBindingDivisor(vao, 2, 1);
glVertexArrayAttribFormat(vao, 0, 1, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 2, 2);
 
glEnableVertexArrayAttrib(vao, 3);
glVertexArrayVertexBuffer(vao, 3, buffers[3], 0, sizeof(glm::vec2));
glVertexArrayBindingDivisor(vao, 3, 1);
glVertexArrayAttribFormat(vao, 0, 2, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 3, 3);
 
glEnableVertexArrayAttrib(vao, 4);
glVertexArrayVertexBuffer(vao, 4, buffers[4], 0, sizeof(unsigned int));
glVertexArrayBindingDivisor(vao, 4, 1);
glVertexArrayAttribFormat(vao, 0, 1, GL_INT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 4, 4);

 

I seem to be getting nonsensical values from pos_x, but everything else is fine.


glVertexArrayAttribFormat has 0 for the attribute index everywhere, so I'd guess each call overwrites attribute 0's format and the last one (the integer) wins.
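
A minimal sketch of what that means (the second argument to glVertexArrayAttribFormat is the attribute index being described, so each call should pass its own index rather than 0):

glVertexArrayAttribFormat(vao, 1, 1, GL_FLOAT, GL_FALSE, 0); // describes attribute 1 (pos_y), not attribute 0
glVertexArrayAttribFormat(vao, 2, 1, GL_FLOAT, GL_FALSE, 0); // describes attribute 2 (rotation)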


> glVertexArrayAttribFormat has 0 for the attribute index everywhere, so I'd guess each call overwrites attribute 0's format and the last one (the integer) wins.

 

I've changed it to this, but now I am getting nonsensical values for TextureID.

 

glEnableVertexArrayAttrib(vao, 0);
glVertexArrayVertexBuffer(vao, 0, buffers[0], 0, sizeof(float));
glVertexArrayBindingDivisor(vao, 0, 1);
glVertexArrayAttribFormat(vao, 0, 1, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 0, 0);
 
glEnableVertexArrayAttrib(vao, 1);
glVertexArrayVertexBuffer(vao, 1, buffers[1], 0, sizeof(float));
glVertexArrayBindingDivisor(vao, 1, 1);
glVertexArrayAttribFormat(vao, 1, 1, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 1, 1);
 
glEnableVertexArrayAttrib(vao, 2);
glVertexArrayVertexBuffer(vao, 2, buffers[2], 0, sizeof(float));
glVertexArrayBindingDivisor(vao, 2, 1);
glVertexArrayAttribFormat(vao, 2, 1, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 2, 2);
 
glEnableVertexArrayAttrib(vao, 3);
glVertexArrayVertexBuffer(vao, 3, buffers[3], 0, sizeof(glm::vec2));
glVertexArrayBindingDivisor(vao, 3, 1);
glVertexArrayAttribFormat(vao, 3, 2, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 3, 3);
 
glEnableVertexArrayAttrib(vao, 4);
glVertexArrayVertexBuffer(vao, 4, buffers[4], 0, sizeof(unsigned int));
glVertexArrayBindingDivisor(vao, 4, 1);
glVertexArrayAttribFormat(vao, 4, 1, GL_INT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 4, 4);


Try glVertexArrayAttribIFormat, with the "I". There may be more functions that differ for integers; I'm not sure right now.

 

EDIT: The reason is that the non-I version converts the values to float (so in the shader, the float's bit pattern gets interpreted as integer bits), or, if normalized is set to true, instead maps the values from their full integer range to [0, 1] or [-1, 1].
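
A minimal sketch of the corrected setup for the integer attribute, assuming the same vao and buffers as above (GL_UNSIGNED_INT is used here to match the unsigned data, where the original code passed GL_INT):

glEnableVertexArrayAttrib(vao, 4);
glVertexArrayVertexBuffer(vao, 4, buffers[4], 0, sizeof(unsigned int));
glVertexArrayBindingDivisor(vao, 4, 1);
// glVertexArrayAttribIFormat keeps the values as integers; note that it takes
// no 'normalized' parameter, since no float conversion happens.
glVertexArrayAttribIFormat(vao, 4, 1, GL_UNSIGNED_INT, 0);
glVertexArrayAttribBinding(vao, 4, 4);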

Edited by Erik Rufelt
