#pragma comment(lib,"sdlmain.lib")
#pragma comment(lib,"sdl.lib")
#pragma comment(lib, "OpenGL32.lib")
#pragma comment(lib, "GLu32.lib")
#pragma comment(lib, "SDL_Image/SDL_image.lib")
#include "SDL_image.h"
#include <sdl.h>
#include <sdl_opengl.h>
SDL_Surface *g_screen;   // primary display surface returned by SDL_SetVideoMode (owned by SDL)
SDL_Surface *mobo;       // temporary surface holding the JPG loaded by IMG_Load
bool quit_app = 0;       // main-loop exit flag; set on SDL_QUIT or ESC keypress
int bpp;                 // NOTE(review): never read or written in this file — appears unused
GLuint texture_one;      // GL texture object created/filled in LoadTextures()
void LoadTextures()
{
//TEST
glEnable(GL_TEXTURE_2D);
mobo = IMG_Load("test.jpg");
if(mobo)
{
glGenTextures(1,&texture_one);
glBindTexture(GL_TEXTURE_2D,texture_one);
SDL_LockSurface(mobo);
glTexImage2D(GL_TEXTURE_2D, 3, 3, mobo->w, mobo->h, 0 , GL_RGB, GL_UNSIGNED_BYTE, mobo->pixels);
SDL_UnlockSurface(mobo);
SDL_FreeSurface(mobo);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR_MIPMAP_NEAREST);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR_MIPMAP_LINEAR);
}
else
{
printf("IMG_Load: %s\n", IMG_GetError());
}
}
bool InitSDL()
{
if(SDL_Init(SDL_INIT_VIDEO))
{
printf("Couldn't initalize SDL!");
return 0;
}
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
if((g_screen = SDL_SetVideoMode(640,480,32,SDL_OPENGL)) == NULL)
{
printf("Couldn't set video mode! Quitting...");
SDL_Quit();
return 0;
}
return 1;
}
void SetupProjection(int width, int height)
{
if(height == 0)
{
height = 1;
}
glViewport(0,0,width,height); //Reset viewport to new dimensions
glMatrixMode(GL_PROJECTION); //set the current matrix to projection
glLoadIdentity(); // reset projection matrix
//Calculate aspect ratio of window
gluPerspective(52.0f,(GLfloat)width/(GLfloat)height,1.0f,1000.0f);
glMatrixMode(GL_MODELVIEW); //set modelview matrix
glLoadIdentity(); //reset modelview matrix
}
// One-time OpenGL state setup: enable depth testing and install the
// projection for the fixed 640x480 window. Always reports success.
bool InitOGL()
{
glEnable(GL_DEPTH_TEST);
SetupProjection(640, 480);
return true;
}
void RenderScene()
{
//clear screen and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear The Screen And The Depth Buffer
glLoadIdentity(); // Reset The View
glTranslatef(-1.5f,0.0f,-6.0f); // Move Left 1.5 Units And Into The Screen 6.0
// draw a triangle
glBegin(GL_POLYGON); // start drawing a polygon
glVertex3f( 0.0f, 1.0f, 0.0f); // Top
glVertex3f( 1.0f,-1.0f, 0.0f); // Bottom Right
glVertex3f(-1.0f,-1.0f, 0.0f); // Bottom Left
glEnd(); // we're done with the polygon
glTranslatef(3.0f,0.0f,0.0f); // Move Right 3 Units
glEnable( GL_TEXTURE_2D );
glBindTexture( GL_TEXTURE_2D, texture_one );
// draw a square (quadrilateral)
glBegin(GL_QUADS);
glTexCoord2f(0.0,0.0); glVertex2f(-1.0f, 1.0f);
glTexCoord2f(0.0,1.0); glVertex2f( 1.0f, 1.0f);
glTexCoord2f(1.0,1.0); glVertex2f( 1.0f,-1.0f);
glTexCoord2f(0.0,1.0); glVertex2f(-1.0f,-1.0f);
glEnd();
// swap buffers to display, since we're double buffered.
SDL_GL_SwapBuffers();
}
// Entry point: initialize SDL + OpenGL, load textures, then run the event
// loop until SDL_QUIT or ESC. (Parameter names were swapped in the original.)
int main(int argc, char **argv)
{
(void)argc;
(void)argv;
// Bail out if initialization fails instead of rendering with no GL context.
if(!InitSDL())
{
return 1;
}
InitOGL();
LoadTextures();
while(!quit_app)
{
RenderScene();
SDL_Event event;
while ( SDL_PollEvent(&event) )
{
if ( event.type == SDL_QUIT )
{
quit_app = 1;
}
if ( event.type == SDL_KEYDOWN )
{
if ( event.key.keysym.sym == SDLK_ESCAPE )
{
quit_app = 1;
}
}
}
}
// NOTE: the surface returned by SDL_SetVideoMode is owned by SDL and is
// released by SDL_Quit; calling SDL_FreeSurface on it is an error per the
// SDL 1.2 docs, so the explicit free was removed.
SDL_Quit();
return 0;
}
SDL_Surface to OGL texture
Ok so I am using SDL for my OGL implementation because it's way easier. I'm using SDL_image to load a JPG into an SDL_Surface and then load that into an OGL texture object. Once I had it written and it didn't work, I did some research, and it seems that my implementation should work. The image-loading routine is LoadTextures, although I don't think that it is the problem. Here's the WHOLE program. It's not that long. See anything wrong?
Yes, 256x256, and it is in the right place. I know it's being loaded because I null-check it, and if it weren't, SDL would deploy its parachute for me trying to access mobo->pixels when there's nothing there. That was happening before.
Quote:Original post by Taymo
glTexImage2D(GL_TEXTURE_2D, 3, 3, mobo->w, mobo->h, 0 , GL_RGB, GL_UNSIGNED_BYTE, mobo->pixels);
You have (mistakenly, I think) set the lod parameter of glTexImage2D to 3. Set that as 0, unless you are planning to supply a complete set of mipmaps.
Yeah, I just tested it out on my framework - setting an incorrect mipmap value will cause it to fail completely. I had assumed that it would just make the texture weird.
You might want to look into gluBuild2DMipMaps. gluBuild2DMipMaps will also convert non-power-of-2 textures to the correct dimensions.
An additional (possible) problem is the pixel format of the surface. I'm not sure how IMG_Load creates an image, but you may want to convert it to a specific format to guarantee you're using RGB. I've had weird stuff happen before, like SDL_LoadBMP returning a BGR texture, or it having an alpha value or something.
You might want to look into gluBuild2DMipMaps. gluBuild2DMipMaps will also convert non-power-of-2 textures to the correct dimensions.
An additional (possible) problem is the pixel format of the surface. I'm not sure how IMG_Load creates an image, but you may want to convert it to a specific format to guarentee you're using RGB. I've had weird stuff happen before, like SDL_LoadBMP returning a BGR texture, or it having an alpha value or something.
True words by Mushu.
You are also explicitly providing GL_RGB for the data format. You should check the number of bits per pixel in the SDL_Surface before providing it like that. I believe the SDL_Surface struct has a member value that contains this information. If 32 -> GL_RGBA, if 24 -> GL_RGB, etc. It isn't a problem with JPG images, though; they're always 24-bit AFAIK.
You may want to use gluBuild2DMipmaps as Mushu suggested, but if you don't make sure you don't set mipmap filtering with glTexParameter, use linear filtering instead.
You are also explicitly providing GL_RGB for the data format. You should check the number of bits per pixel in the SDL_Surface before providing it like that. I believe the SDL_Surface struct has a member value that contains this information. If 32 -> GL_RGBA, if 24 -> GL_RGB, etc. It isn't a problem with JPG images, though; they're always 24-bit AFAIK.
You may want to use gluBuild2DMipmaps as Mushu suggested, but if you don't make sure you don't set mipmap filtering with glTexParameter, use linear filtering instead.
Here's what I use:
In this, image is an SDL_surface loaded with IMG_Load(). I haven't tested this with every file format, but it works fine for me when I use bitmaps, tgas, and pngs. Its basically doing what Deavik said.
And if you want to display your source code(especially when its as long as that!), try using the [ source ] and [ /source ] tags around your code(without the spaces). It will format your code nicely.
if(image->format->BitsPerPixel==32) { glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->w, image->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, image->pixels); } else { glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->w, image->h, 0, GL_BGR, GL_UNSIGNED_BYTE, image->pixels); }
In this, image is an SDL_surface loaded with IMG_Load(). I haven't tested this with every file format, but it works fine for me when I use bitmaps, tgas, and pngs. Its basically doing what Deavik said.
And if you want to display your source code(especially when its as long as that!), try using the [ source ] and [ /source ] tags around your code(without the spaces). It will format your code nicely.
You can convert any surface to a specific format (and set a colorkey) using the following method (there are others) -
Code dump -
The key is that SDL will internally "do the right stuff" with the surface formats when blitting. You can also use SDL_ConvertSurface to create a new surface with a specific pixel format, but I like to apply a colorkey too [grin]
- Load the surface.
- Create a new RGB surface with the desired format.
- Fill the new surface with pure alpha.
- Set the desired colorkey on the loaded surface.
- Blit the loaded surface onto the new surface.
- Convert the new surface to an OpenGL texture.
- Free both surfaces.
Code dump -
SDL_Surface* surf = SDL_LoadBMP( filename.c_str() );if ( !surf ) { OGLGfxAdapter::TextureType ret; ret.tex = 0; ret.w = 0; ret.h = 0; return ret;}SDL_Surface* dest = SDL_CreateRGBSurface( SDL_HWSURFACE, surf->w, surf->h, 32, #if SDL_BYTEORDER == SDL_LIL_ENDIAN // OpenGL RGBA masks 0x000000FF, 0x0000FF00, 0x00FF0000, 0xFF000000 #else 0xFF000000, 0x00FF0000, 0x0000FF00, 0x000000FF #endif );SDL_FillRect( dest, NULL, SDL_MapRGBA( dest->format, 0, 0, 0, SDL_ALPHA_TRANSPARENT ) );SDL_SetColorKey( surf, SDL_SRCCOLORKEY, SDL_MapRGB( surf->format, (Uint8)colorkey.r, (Uint8)colorkey.g, (Uint8)colorkey.b ) );SDL_BlitSurface( surf, NULL, dest, NULL );
The key is that SDL will internally "do the right stuff" with the surface formats when blitting. You can also use SDL_ConvertSurface to create a new surface with a specific pixel format, but I like to apply a colorkey too [grin]
Washu- You don't have to go through all that mess
From the SDL docs
SDL_Surface *SDL_DisplayFormatAlpha(SDL_Surface *surface);
Description
This function takes a surface and copies it to a new surface of the pixel format and colors of the video framebuffer plus an alpha channel, suitable for fast blitting onto the display surface. It calls SDL_ConvertSurface.
If you want to take advantage of hardware colorkey or alpha blit acceleration, you should set the colorkey and alpha value before calling this function.
This function can be used to convert a colorkey to an alpha channel, if the SDL_SRCCOLORKEY flag is set on the surface. The generated surface will then be transparent (alpha=0) where the pixels match the colorkey, and opaque (alpha=255) elsewhere.
Or the non Alpha version
SDL_Surface *SDL_DisplayFormat(SDL_Surface *surface);
Description
This function takes a surface and copies it to a new surface of the pixel format and colors of the video framebuffer, suitable for fast blitting onto the display surface. It calls SDL_ConvertSurface.
If you want to take advantage of hardware colorkey or alpha blit acceleration, you should set the colorkey and alpha value before calling this function.
P.S. How do you put code on this forum?
From the SDL docs
SDL_Surface *SDL_DisplayFormatAlpha(SDL_Surface *surface);
Description
This function takes a surface and copies it to a new surface of the pixel format and colors of the video framebuffer plus an alpha channel, suitable for fast blitting onto the display surface. It calls SDL_ConvertSurface.
If you want to take advantage of hardware colorkey or alpha blit acceleration, you should set the colorkey and alpha value before calling this function.
This function can be used to convert a colorkey to an alpha channel, if the SDL_SRCCOLORKEY flag is set on the surface. The generated surface will then be transparent (alpha=0) where the pixels match the colorkey, and opaque (alpha=255) elsewhere.
Or the non Alpha version
SDL_Surface *SDL_DisplayFormat(SDL_Surface *surface);
Description
This function takes a surface and copies it to a new surface of the pixel format and colors of the video framebuffer, suitable for fast blitting onto the display surface. It calls SDL_ConvertSurface.
If you want to take advantage of hardware colorkey or alpha blit acceleration, you should set the colorkey and alpha value before calling this function.
P.S. How do you put code on this forum?
This topic is closed to new replies.
Advertisement
Popular Topics
Advertisement