3D texture access violation

I'm not a fan of dumping code like this, but I'm getting a headache from trying to figure this out, so I'm going to ask.

I'm trying to create a 3D texture out of four 256x256 images. My code looks like this:

[code]GLuint TextureLoader::Load3dTexture(vector<string> images, bool smooth, bool repeat)
{
    unsigned int numTextures = images.size();
    if (numTextures == 0)
        return 0;

    vector<GLubyte*> texturesData;
    vector<Vec2i> texturesSizes;
    texturesData.resize(numTextures);
    texturesSizes.resize(numTextures);

    for (unsigned int i = 0; i < numTextures; i++)
    { // collect all the data
        loadImage(images[i], &(texturesData[i]), &(texturesSizes[i].x), &(texturesSizes[i].y));
    }

    int w = texturesSizes[0].x, h = texturesSizes[0].y;

    unsigned int dataIndex = 0;
    int dataSize = w*h*numTextures;
    GLubyte *completeData = new GLubyte[dataSize];

    for (unsigned int i = 0; i < numTextures; i++)
    {
        if (texturesSizes[i].x != w || texturesSizes[i].y != h)
        {
            for (unsigned int j = 0; j < texturesData.size(); j++)
            {
                delete [] texturesData[j];
            }
            delete [] completeData;
            cout << "Error loading textures for 3d texture! Different sizes!\n";
            return 0;
        }

        // copy over the actual data
        for (int k = 0; k < w*h; k++)
        {
            completeData[dataIndex] = texturesData[i][k];
            dataIndex += 1;
        }
    }

    GLuint TextureID;
    glGenTextures(1, &TextureID);
    glBindTexture(GL_TEXTURE_3D, TextureID);
    glTexImage3D(GL_TEXTURE_3D, 0, GL_RGBA8, w, h, numTextures, 0, GL_RGBA, GL_UNSIGNED_BYTE, completeData);

    glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_S, repeat ? GL_REPEAT : GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, repeat ? GL_REPEAT : GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, repeat ? GL_REPEAT : GL_CLAMP_TO_EDGE);

    glGenerateMipmap(GL_TEXTURE_3D);

    glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, smooth ? GL_LINEAR_MIPMAP_LINEAR : GL_NEAREST);
    glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, smooth ? GL_LINEAR : GL_NEAREST);

    glBindTexture(GL_TEXTURE_3D, 0);

    for (unsigned int j = 0; j < texturesData.size(); j++)
    {
        delete [] texturesData[j];
    }
    delete [] completeData;

    return TextureID;
}
[/code]

The line
[code]glTexImage3D(GL_TEXTURE_3D, 0, GL_RGBA8, w, h, numTextures, 0, GL_RGBA, GL_UNSIGNED_BYTE, completeData);[/code]
is where I get an access violation error.

The loadImage(...) function works correctly; I used it earlier to load and apply a 2D texture. I'm using GLEW for the 3D texture extension, and I've checked that it's supported on my card. I enable 3D texturing via glEnable(GL_TEXTURE_3D);

The "completeData" array is exactly as big as w*h*numTextures (as you can see above). Does the glTexImage3d(...) require the pointer to be to a 3d or 2d array? Am I missing something? Any help is appreciated.
