Problem with 16-bit image display
Hi
By splitting the image into 1024*1024 chunks I can display images as large as I need.
Thanks. 8-bit image display works fine, with no problems at all.
With 16-bit images, however, black stripes appear at regular intervals.
I am doing the texture mapping in the following manner:
void GetImageData_8(void)
{
unsigned char rgb;
unsigned char *ptr;
int i,j,ind;
img_data_8=new unsigned char[1024*1024*4];
ptr=img_data_8;
ind=0;
for(i=0;i<1024;i++){
for(j=0;j<1024;j++){
fin.read((char*)&rgb,sizeof(unsigned char));
ptr[ind++]=rgb;
ptr[ind++]=rgb;
ptr[ind++]=rgb;
ptr[ind++]=255;
}
}
}
void GetImageData_16(void)
{
unsigned short rgb;
unsigned short *ptr;
int i,j,ind;
img_data_16=new unsigned short[1024*1024*4];
ptr=img_data_16;
ind=0;
for(i=0;i<1024;i++){
for(j=0;j<1024;j++){
fin.read((char*)&rgb,sizeof(unsigned short));
ptr[ind++]=rgb;
ptr[ind++]=rgb;
ptr[ind++]=rgb;
ptr[ind++]=255;
}
}
}
void SetTexture_8(void)
{
glClearColor(0.25,0.25,0.25,0.0);
glEnable(GL_DEPTH_TEST);
glShadeModel(GL_FLAT);
glPixelStorei(GL_UNPACK_ALIGNMENT,1);
glGenTextures(1,&texture_name);
glBindTexture(GL_TEXTURE_2D,texture_name);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,DEFAULT_WIDTH,DEFAULT_HEIGHT,0,GL_RGBA,GL_UNSIGNED_BYTE,img_data_8);
delete [] img_data_8;
}
void SetTexture_16(void)
{
glClearColor(0.25,0.25,0.25,0.0);
glEnable(GL_DEPTH_TEST);
glShadeModel(GL_FLAT);
glPixelStorei(GL_UNPACK_ALIGNMENT,1);
glGenTextures(1,&texture_name);
glBindTexture(GL_TEXTURE_2D,texture_name);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,DEFAULT_WIDTH,DEFAULT_HEIGHT,0,GL_RGBA,GL_UNSIGNED_SHORT,img_data_16);
delete [] img_data_16;
}
void Display(void)
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV,GL_TEXTURE_ENV_MODE,GL_REPLACE);
glBindTexture(GL_TEXTURE_2D,texture_name);
glBegin(GL_QUADS);
glTexCoord2f(0.0,0.0);glVertex3f(0,0,0);
glTexCoord2f(1.0,0.0);glVertex3f(wind_x,0,0);
glTexCoord2f(1.0,1.0);glVertex3f(wind_x,wind_y,0);
glTexCoord2f(0.0,1.0);glVertex3f(0,wind_y,0);
glEnd();
glFlush();
glutSwapBuffers();
glDisable(GL_TEXTURE_2D);
}
void Reshape(int x,int y)
{
glViewport(0,0,x,y);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0,x,y,0,-1,1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
Is there any mistake in my texture mapping? Is there anything else I need to specify, besides the data type GL_UNSIGNED_SHORT, for a 16-bit image?
No, there's nothing else to specify. The problem must be in the code that generates the data for the texture.
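One quick way to narrow it down: fill the 16-bit buffer with a synthetic ramp instead of reading from the file, and upload that. If the stripes disappear, the glTexImage2D call is fine and the file-reading loop is the culprit; if they stay, look at the upload side. A minimal sketch (the function name is mine; it assumes the same 1024*1024 RGBA layout and the img_data_16 global from your code):
void GetTestData_16(void)
{
int i,j,ind=0;
img_data_16=new unsigned short[1024*1024*4];
for(i=0;i<1024;i++){
for(j=0;j<1024;j++){
unsigned short v=(unsigned short)(j*64); // horizontal ramp, 0..65472
img_data_16[ind++]=v;     // R
img_data_16[ind++]=v;     // G
img_data_16[ind++]=v;     // B
img_data_16[ind++]=65535; // full-scale alpha for unsigned short data
}
}
}
If the ramp renders cleanly, compare what the file-reading loop actually puts in the buffer against what you expect (value range, row length, byte order of the 16-bit samples).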