Hi,
I want to create a 1D integer texture out of 16-bit data and access this data in the geometry shader. Here's how I create the texture:
/* One texel's payload for the interval-map texture: two 8-bit bounds.
 * NOTE(review): the upload below passes this data with type
 * GL_UNSIGNED_SHORT, i.e. ONE 16-bit word per texel — so each INTERVAL
 * is read as a single 16-bit value whose byte order depends on host
 * endianness (lowerBound=1/upperBound=2 arrives as 0x0201 on
 * little-endian, not 0x0102). Confirm the intended packing, or use
 * 16-bit members / explicit packing instead. */
typedef struct
{
unsigned char lowerBound;
unsigned char upperBound;
} INTERVAL;
glGenTextures(1, &intervalMapTex);
glBindTexture(GL_TEXTURE_1D, intervalMapTex);
glPixelStorei(GL_UNPACK_ALIGNMENT, 2);
INTERVAL* pData = new INTERVAL[2];
pData[0].lowerBound = 1;
pData[0].upperBound = 2;
pData[1].lowerBound = 4;
pData[1].upperBound = 5;
glTexImage1D(GL_TEXTURE_1D, 0, GL_ALPHA16UI_EXT, 2, 0, GL_ALPHA_INTEGER_EXT, GL_UNSIGNED_SHORT, pData);
//glTexImage1D(GL_TEXTURE_1D, 0, GL_LUMINANCE_ALPHA8UI_EXT, 2, 0, GL_LUMINANCE_ALPHA_INTEGER_EXT, GL_UNSIGNED_SHORT, pData);
In the geometry shader I do this:
#version 120
#extension GL_EXT_gpu_shader4 : require
#extension GL_EXT_geometry_shader4 : require
// Unsigned-integer sampler — requires the bound texture to have an
// unsigned-integer internal format (EXT_texture_integer, e.g. GL_ALPHA16UI_EXT).
// NOTE(review): integer textures must use NEAREST filtering; with the GL
// default mipmap min filter and only level 0 uploaded, the texture is
// incomplete and fetches return 0 — verify the filter state on the GL side.
uniform usampler1D intervalMap;
// Fetch texel 0 of mip level 0; the GL_ALPHA_INTEGER_EXT value lands in .a.
uvec4 v = texelFetch1D(intervalMap, 0, 0);
if(v.a == uint(0)) {
// emit 1 triangle
} else {
// emit 2 triangles
}
I would expect v.a to be 0x102, so 2 triangles are emitted. But I get only 1 triangle which means v.a == 0.
What am I doing wrong? This is the first time I've worked with integer textures.
Do I really need EXT_texture_integer? My GeForce 8800 GTS doesn't seem to support it, although according to the specs for EXT_texture_integer, G80 chips support this extension. I use the latest official driver (186.18) on Windows XP.
TiKu