Jump to content

  • Log In with Google      Sign In   
  • Create Account


Opengl texture alpha channel


Old topic!
Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.

  • You cannot reply to this topic
2 replies to this topic

#1 justin12343   Members   -  Reputation: 198

Like
0Likes
Like

Posted 13 December 2012 - 12:58 PM

I'm having trouble getting the alpha channel of a texture loaded with SDL 1.3 and SDL_Image to work correctly. The problem is that according to the Texture display window in gDEBugger, the alpha value of each pixel is set to 0, causing the texture to appear fully transparent. If I disable blending or load a format that does not have an alpha channel, the texture displays just fine as expected. What could I be doing wrong?

// Creates the GL context for the already-created SDL window, configures the
// fixed-function pipeline for 2D alpha-blended sprite rendering, and starts
// SDL_Image. Returns false if SDL_Image fails to initialize every requested
// loader.
// NOTE(review): backWidth, backHeight, backBpp and fullscreen are currently
// unused here -- presumably consumed when pWin was created; confirm.
bool OGLRenderer::Initialize(int backWidth, int backHeight, int backBpp, bool fullscreen)
{
	ctx = SDL_GL_CreateContext(pWin);
	SDL_GL_SetSwapInterval(1);	// enable vsync

	glEnable( GL_TEXTURE_2D);
	glEnable( GL_BLEND);	// allow alpha blending
	glBlendFunc( GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
	// 2D renderer drawn in painter's order: no depth test, culling or lighting.
	glDisable( GL_DEPTH_TEST);
	glDisable( GL_CULL_FACE );
	glDisable( GL_ALPHA_TEST);
	glDisable( GL_LIGHTING);

	// Report the context version we actually received.
	int OpenGLVersion[2];
	glGetIntegerv(GL_MAJOR_VERSION, &OpenGLVersion[0]);
	glGetIntegerv(GL_MINOR_VERSION, &OpenGLVersion[1]);
	printf( "Renderer initialized successfully using the OpenGL %i.%i backend.\n", OpenGLVersion[0], OpenGLVersion[1]);

	int flags = IMG_INIT_PNG|IMG_INIT_JPG|IMG_INIT_TIF|IMG_INIT_WEBP;
	// BUG FIX: '!=' binds tighter than '&', so the original test
	// 'IMG_Init(flags) & flags != flags' parsed as
	// 'IMG_Init(flags) & (flags != flags)' == 0 and could never detect
	// failure. Parenthesize the mask before comparing.
	if ((IMG_Init(flags) & flags) != flags)
	{
		printf("SDL_Image failed to initialize.\n");
		printf("%s\n", IMG_GetError());
		return false;
	}
	return true;
}
// Uploads an SDL_Surface as a new 2D OpenGL texture and returns its name.
// The surface's channel layout (Rshift/Ashift/Aloss) is inspected to pick a
// matching GL pixel-transfer format; unsupported layouts throw
// std::logic_error. Filtering is set to GL_NEAREST, no mipmaps are built.
GLuint OGLRenderer::create_texture( SDL_Surface *surf)
{
	GLenum Mode;
	GLint internalFormat;
	if (surf->format->BytesPerPixel == 4) // contains an alpha channel
	{
		internalFormat = GL_RGBA8;
		if (surf->format->Rshift == 24 && surf->format->Aloss == 0 ) Mode = GL_ABGR_EXT;
		else if ( surf->format->Rshift == 16 && surf->format->Aloss == 8 ) Mode = GL_BGRA;
		else if ( surf->format->Rshift == 16 && surf->format->Ashift == 24 ) Mode = GL_BGRA;
		else if ( surf->format->Rshift == 0 && surf->format->Ashift == 24 ) Mode = GL_RGBA;
		else throw std::logic_error("Pixel Format not recognized for GL display");
	}
	else if (surf->format->BytesPerPixel == 3) // no alpha channel
	{
		internalFormat = GL_RGB8;
		if (surf->format->Rshift == 16 ) Mode = GL_BGR;
		else if ( surf->format->Rshift == 0 ) Mode = GL_RGB;
		else throw std::logic_error("Pixel Format not recognized for GL display");
	}
	else throw std::logic_error("Pixel Format not recognized for GL display");

	GLuint tex;
	// BUG FIX: GL_UNPACK_ALIGNMENT accepts only 1, 2, 4 or 8. Passing
	// BytesPerPixel (3 for 24-bit surfaces) is GL_INVALID_VALUE, leaving the
	// default alignment of 4 in effect and corrupting rows whose pitch is not
	// a multiple of 4. Alignment 1 is correct for any tightly packed pitch.
	glPixelStorei( GL_UNPACK_ALIGNMENT, 1);
	glGenTextures( 1, &tex);
	glBindTexture( GL_TEXTURE_2D, tex);
	// Use an explicit sized internal format instead of the legacy numeric
	// shorthand (passing BytesPerPixel as internalformat).
	glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, surf->w, surf->h, 0, Mode, GL_UNSIGNED_BYTE, surf->pixels);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	return tex;
}
// Draws one frame of spriteList[index] as a textured immediate-mode quad at
// (x, y), optionally tinted by 'col' packed as 0x00RRGGBB (-1 = no tint).
// NOTE(review): xscale, yscale and angle are currently unused -- the matrix
// transform code below is commented out; confirm whether scaling/rotation is
// still intended here.
void OGLRenderer::draw_sprite( int index, float frame, float x, float y, float xscale, float yscale, float angle, int col)
{
// Reject out-of-range sprite indices and frame numbers.
if (index < 0 || index >= spriteList.size())
  return;
Sprite *tmp = spriteList[index];
if (frame < 0 || frame >= tmp->Frames.size())
  return;
// Lazily upload the sprite's textures on first use.
if (!tmp->loaded)
  InitializeSprite( tmp);
// Truncate the fractional frame and clamp into the valid frame range.
int subImage = clamp_value((int)floor( (float)frame), 0, tmp->FrameCount-1);
unsigned int color = 0xffffffff; // NOTE(review): unused local
unsigned int red = 255;
unsigned int blue = 255;
unsigned int green = 255;
if (col != -1)
{
  // Unpack the 0x00RRGGBB tint into per-channel byte values.
  red = (col & 0x00FF0000) >> 16;
  green = (col & 0x0000FF00) >> 8;
  blue = (col & 0x000000FF);
}
// Destination rectangle, offset by the sprite's origin.
Rect r;
r.left = x - tmp->CenterX;
r.top = y - tmp->CenterY;
r.right = r.left + tmp->FrameWidth;
r.bottom = r.top + tmp->FrameHeight;
OGLFrame *pFrame = reinterpret_cast<OGLFrame*>(tmp->Frames[subImage]);
glBindTexture( GL_TEXTURE_2D, pFrame->texture);
// GL_MODULATE multiplies the texture texels by the per-vertex tint color.
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
/*glPushMatrix();
glTranslatef( r.left, r.top, 0);
glRotatef( camera_angle, 0, 0, 0);
glScalef( xscale, yscale, 1);*/
// 'alpha' and 'depth' are presumably members of the parent renderer class
// (set elsewhere) -- TODO confirm they are initialized before first draw.
glBegin( GL_QUADS);
glTexCoord2f( 0, 0); glColor4f( (float)red/255, (float)green/255, (float)blue/255, alpha); glVertex3f( r.left, r.top, depth);
glTexCoord2f( 1, 0); glColor4f( (float)red/255, (float)green/255, (float)blue/255, alpha); glVertex3f( r.right, r.top, depth);
glTexCoord2f( 1, 1); glColor4f( (float)red/255, (float)green/255, (float)blue/255, alpha); glVertex3f( r.right, r.bottom, depth);
glTexCoord2f( 0, 1); glColor4f( (float)red/255, (float)green/255, (float)blue/255, alpha); glVertex3f( r.left, r.bottom, depth);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
//glPopMatrix();
}


Sponsor:

#2 Ashaman73   Crossbones+   -  Reputation: 6568

Like
0Likes
Like

Posted 14 December 2012 - 02:59 AM

In your last code fragment you use alpha but do not declare it. What happens when using an alpha value of 1.0f ?

#3 justin12343   Members   -  Reputation: 198

Like
0Likes
Like

Posted 15 December 2012 - 03:15 AM

In your last code fragment you use alpha but do not declare it. What happens when using an alpha value of 1.0f ?


The parent renderer class declares and sets alpha to 1, so it can't be the problem.




Old topic!
Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.



PARTNERS