resizing texture
If i have my textures already loaded, and they are 256x256, how can I resize them with code to be something else, like 128x128?
I found gluScaleImage, but I can't get it to work — is this what I need to use?
Yes, that will scale whatever input you give it and write the result to your output buffer. What error code is it returning?
The program is crashing, so I don't think I am accessing the data right. This is my texture-loading code; it's from a NeHe tutorial.
I am guessing that you cant have the same input and output source, so i put a buffer. Do i have to format the GLuint buffer into a texture before i can work with it? And how would i copy the buffer into the original "texture[0]"
#define tot_textures 20GLuint texture[tot_textures];int LoadGLTextures() // Load Bitmaps And Convert To Textures{ int Status=FALSE; // Status Indicator AUX_RGBImageRec *TextureImage[tot_textures]; // Create Storage Space For The Textures memset(TextureImage,0,sizeof(void *)*tot_textures); // Set The Pointer To NULL if ((TextureImage[0]=LoadBMP("Data/grass.bmp")) && (TextureImage[1]=LoadBMP("Data/ice.bmp")) && (TextureImage[2]=LoadBMP("Data/sword.bmp")) && (TextureImage[3]=LoadBMP("Data/bark.bmp")) && (TextureImage[4]=LoadBMP("Data/tree.bmp")) && (TextureImage[5]=LoadBMP("Data/brick.bmp")) && (TextureImage[6]=LoadBMP("Data/roof.bmp")) && (TextureImage[7]=LoadBMP("Data/rock.bmp")) && (TextureImage[8]=LoadBMP("Data/skyclouds.bmp")) && (TextureImage[9]=LoadBMP("Data/chest.bmp")) && (TextureImage[10]=LoadBMP("Data/goldbag.bmp")) && (TextureImage[11]=LoadBMP("Data/warrior.bmp")) && (TextureImage[12]=LoadBMP("Data/weapshop.bmp")) && (TextureImage[13]=LoadBMP("Data/hosshop.bmp")) && (TextureImage[14]=LoadBMP("Data/shop.bmp")) && (TextureImage[15]=LoadBMP("Data/golem.bmp")) && (TextureImage[16]=LoadBMP("Data/hellpig.bmp")) && (TextureImage[17]=LoadBMP("Data/water.bmp")) && (TextureImage[18]=LoadBMP("Data/screen.bmp")) && (TextureImage[19]=LoadBMP("Data/icons.bmp"))) ) { Status=TRUE; glGenTextures(tot_textures, &texture[0]); for (int loop=0; loop<tot_textures; loop++) { glBindTexture(GL_TEXTURE_2D, texture[loop]); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, 3, TextureImage[loop]->sizeX, TextureImage[loop]->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, TextureImage[loop]->data); }//*********my atempt to resize the first texture**********GLuint buffer; gluScaleImage(GL_RGB,256,256,GL_UNSIGNED_INT,&texture[0],32,32,GL_UNSIGNED_INT,&buffer); } for (int loop=0; loop<tot_textures; loop++) { if (TextureImage[loop]) // If Texture Exists { if (TextureImage[loop]->data) // If 
Texture Image Exists { free(TextureImage[loop]->data); // Free The Texture Image Memory } free(TextureImage[loop]); // Free The Image Structure } } return Status; // Return The Status}
I am guessing that you cant have the same input and output source, so i put a buffer. Do i have to format the GLuint buffer into a texture before i can work with it? And how would i copy the buffer into the original "texture[0]"
You would need something like this
Additionally, instead of that glTexImage2D call, update it to
This tells the driver you want a specific formatting. It is more modern.
What you have is something from 1990
gluScaleImage(GL_RGB,256,256,GL_UNSIGNED_BYTE,inBuffer,32,32,GL_UNSIGNED_BYTE,outBuffer);glBindTexture(GL_TEXTURE_2D, newTexture);glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);glTexImage2D(GL_TEXTURE_2D, 0, 3, TextureImage[loop]->sizeX, TextureImage[loop]->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, outBuffer);
Additionally, instead of that glTexImage2D call, update it to
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, TextureImage[loop]->sizeX, TextureImage[loop]->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, outBuffer);
This tells the driver you want a specific formatting. It is more modern.
What you have is something from 1990
It is still crashing on gluScaleImage. I am not sure how to create the buffer; I tried the same way the normal textures are made.
also Seems to be a c-style way of doing it, should i be doing this with new and delete?
And also, is there any difference in using GLuint, and unsigned int?
also Seems to be a c-style way of doing it, should i be doing this with new and delete?
#define tot_textures 20GLuint texture[tot_textures];int LoadGLTextures() // Load Bitmaps And Convert To Textures{ int Status=FALSE; // Status Indicator AUX_RGBImageRec *TextureImage[tot_textures]; // Create Storage Space For The Textures memset(TextureImage,0,sizeof(void *)*tot_textures); // Set The Pointer To NULL if ((TextureImage[0]=LoadBMP("Data/grass.bmp")) && (TextureImage[1]=LoadBMP("Data/ice.bmp")) && (TextureImage[2]=LoadBMP("Data/sword.bmp")) && (TextureImage[3]=LoadBMP("Data/bark.bmp")) && (TextureImage[4]=LoadBMP("Data/tree.bmp")) && (TextureImage[5]=LoadBMP("Data/brick.bmp")) && (TextureImage[6]=LoadBMP("Data/roof.bmp")) && (TextureImage[7]=LoadBMP("Data/rock.bmp")) && (TextureImage[8]=LoadBMP("Data/skyclouds.bmp")) && (TextureImage[9]=LoadBMP("Data/chest.bmp")) && (TextureImage[10]=LoadBMP("Data/goldbag.bmp")) && (TextureImage[11]=LoadBMP("Data/warrior.bmp")) && (TextureImage[12]=LoadBMP("Data/weapshop.bmp")) && (TextureImage[13]=LoadBMP("Data/hosshop.bmp")) && (TextureImage[14]=LoadBMP("Data/shop.bmp")) && (TextureImage[15]=LoadBMP("Data/golem.bmp")) && (TextureImage[16]=LoadBMP("Data/hellpig.bmp")) && (TextureImage[17]=LoadBMP("Data/water.bmp")) && (TextureImage[18]=LoadBMP("Data/screen.bmp")) && (TextureImage[19]=LoadBMP("Data/icons.bmp"))) { Status=TRUE; glGenTextures(tot_textures, &texture[0]); for (int loop=0; loop<tot_textures; loop++) { AUX_RGBImageRec* buffer; //Is this the way to make the buffer? memset(buffer,0,sizeof(void*)); //should i use new? 
not familiar with memset gluScaleImage(GL_RGB,TextureImage[loop]->sizeX,TextureImage[loop]->sizeY,GL_UNSIGNED_BYTE,TextureImage[loop]->data,TextureImage[loop]->sizeX/4,TextureImage[loop]->sizeY/4,GL_UNSIGNED_BYTE,buffer); //CRASH glBindTexture(GL_TEXTURE_2D, texture[loop]); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, buffer->sizeX, buffer->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, buffer); free(buffer->data); } } for (int loop=0; loop<tot_textures; loop++) { if (TextureImage[loop]) // If Texture Exists { if (TextureImage[loop]->data) // If Texture Image Exists { free(TextureImage[loop]->data); // Free The Texture Image Memory } free(TextureImage[loop]); // Free The Image Structure } } return Status; // Return The Status}
And also, is there any difference in using GLuint, and unsigned int?
It looks like you are forgetting to allocate space.
If you are coding in C, then use malloc and free. If C++, then new and delete
To allocate space with "new"
The GL spec defines GLuint to be at least 32 bits.
unsigned int size depends on your compiler.
If you are working in the 32 bit world, then both are the same.
If you are coding in C, then use malloc and free. If C++, then new and delete
To allocate space with "new"
GLubyte *buffer = new GLubyte[(TextureImage[loop]->sizeX/4*TextureImage[loop]->sizeY/4)*3];gluScaleImage(GL_RGB,TextureImage[loop]->sizeX,TextureImage[loop]->sizeY,GL_UNSIGNED_BYTE,TextureImage[loop]->data,TextureImage[loop]->sizeX/4,TextureImage[loop]->sizeY/4,GL_UNSIGNED_BYTE,buffer);//and the resulting pixels are now in buffer//use it to make a texture
The GL spec defines GLuint to be at least 32 bits.
unsigned int size depends on your compiler.
If you are working in the 32 bit world, then both are the same.
This topic is closed to new replies.
Advertisement
Popular Topics
Advertisement