more testing:
- I've added
[source lang="cpp"]SDL_GL_SetAttribute( SDL_GL_ACCELERATED_VISUAL, 1);[/source]
does not have any effect. I think it should be enough with SDL_OPENGL but just in case.
- I've run the executable on a notebook with only one GPU and I keep getting "GDI Generic" for GL_RENDERER.
So maybe it's not an issue of having dual GPUs; furthermore, if it were using the Intel HD integrated graphics card, I should be getting "Intel" as the GL_VENDOR, right?
OK, one problem here is that you're using SDL_Delay to control framerate.
It's just some code I grabbed from the internet; I don't care about that right now.
For your GL context problem, I'll have a look over your code later on today and see if I can spot anything (assuming someone else doesn't come up with the solution before then).
Thanks for your time — this is the problem I want to fix!
/* NOTE(review): this is the tail of SDL_GL_LoadTexture() -- the function
   header, the local declarations (w, h, texture, saved_flags, saved_alpha,
   area) and the SDL_CreateRGBSurface() call that creates the intermediate
   `image` surface were lost in the paste.  Restore them from the original
   file before compiling. */
/* Note, there may be other things you need to change,
depending on how you have your OpenGL state set up.
*/
glPushAttrib(GL_ENABLE_BIT);
glDisable(GL_DEPTH_TEST);
glDisable(GL_CULL_FACE);
glEnable(GL_TEXTURE_2D);
/* This allows alpha blending of 2D textures with the scene */
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
/* Use the surface width and height expanded to powers of 2, since
   pre-2.0 OpenGL requires power-of-two texture dimensions. */
w = power_of_two(surface->w);
h = power_of_two(surface->h);
/* The texture may be larger than the image, so report back to the
   caller which sub-range of texture coordinates covers the image. */
texcoord[0] = 0.0f; /* Min X */
texcoord[1] = 0.0f; /* Min Y */
texcoord[2] = (GLfloat)surface->w / w; /* Max X */
texcoord[3] = (GLfloat)surface->h / h; /* Max Y */
/* Save the alpha blending attributes */
saved_flags = surface->flags&(SDL_SRCALPHA|SDL_RLEACCELOK);
saved_alpha = surface->format->alpha;
/* Temporarily disable per-surface alpha so the blit copies the alpha
   channel verbatim instead of blending with it. */
if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA ) {
SDL_SetAlpha(surface, 0, 0);
}
/* Copy the surface into the GL texture image */
area.x = 0;
area.y = 0;
area.w = surface->w;
area.h = surface->h;
SDL_BlitSurface(surface, &area, image, &area);
/* Restore the alpha blending attributes */
if ( (saved_flags & SDL_SRCALPHA) == SDL_SRCALPHA ) {
SDL_SetAlpha(surface, saved_flags, saved_alpha);
}
/* Create an OpenGL texture for the image.  NEAREST filtering avoids
   sampling the unused padding outside the image sub-range. */
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RGBA,
w, h,
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
image->pixels);
SDL_FreeSurface(image); /* No longer needed */
return texture;
}
/* Draw the logo texture centred on the current mouse position.
 * The texture is created lazily on the first call; if the BMP cannot
 * be loaded or the texture upload fails, the function silently does
 * nothing and will retry on the next call. */
void DrawLogoCursor(void)
{
    static GLfloat texMinX, texMinY;
    static GLfloat texMaxX, texMaxY;
    static int w, h;
    int x, y;

    /* Lazily build the cursor texture on first use. */
    if ( cursor_texture == 0 ) {
        GLfloat tc[4];
        SDL_Surface *bmp;

        /* Load the image (could use SDL_image library here) */
        bmp = SDL_LoadBMP(LOGO_FILE);
        if ( bmp == NULL ) {
            return;
        }
        w = bmp->w;
        h = bmp->h;

        /* Convert the image into an OpenGL texture; tc receives the
         * texture-coordinate sub-range that covers the image. */
        cursor_texture = SDL_GL_LoadTexture(bmp, tc);
        texMinX = tc[0];
        texMinY = tc[1];
        texMaxX = tc[2];
        texMaxY = tc[3];

        /* The original surface is no longer needed. */
        SDL_FreeSurface(bmp);

        /* Bail out if the texture conversion failed. */
        if ( cursor_texture == 0 ) {
            return;
        }
    }

    /* Centre the image on the mouse position. */
    SDL_GetMouseState(&x, &y);
    x -= w / 2;
    y -= h / 2;

    /* Draw the textured quad in 2D screen coordinates. */
    SDL_GL_Enter2DMode();
    glBindTexture(GL_TEXTURE_2D, cursor_texture);
    glBegin(GL_TRIANGLE_STRIP);
    glTexCoord2f(texMinX, texMinY); glVertex2i(x,     y    );
    glTexCoord2f(texMaxX, texMinY); glVertex2i(x + w, y    );
    glTexCoord2f(texMinX, texMaxY); glVertex2i(x,     y + h);
    glTexCoord2f(texMaxX, texMaxY); glVertex2i(x + w, y + h);
    glEnd();
    SDL_GL_Leave2DMode();
}
/* Draw the logo texture bouncing around inside the screen rectangle.
 * Position and velocity persist across calls in static variables; the
 * texture itself is created lazily on the first call.  On load or
 * upload failure the function does nothing and retries next call. */
void DrawLogoTexture(void)
{
    static GLfloat texMinX, texMinY;
    static GLfloat texMaxX, texMaxY;
    static int x = 0;
    static int y = 0;
    static int w, h;
    static int delta_x = 1;
    static int delta_y = 1;
    SDL_Surface *screen = SDL_GetVideoSurface();

    /* Lazily build the logo texture on first use. */
    if ( global_texture == 0 ) {
        GLfloat tc[4];
        SDL_Surface *bmp;

        /* Load the image (could use SDL_image library here) */
        bmp = SDL_LoadBMP(LOGO_FILE);
        if ( bmp == NULL ) {
            return;
        }
        w = bmp->w;
        h = bmp->h;

        /* Convert the image into an OpenGL texture; tc receives the
         * texture-coordinate sub-range that covers the image. */
        global_texture = SDL_GL_LoadTexture(bmp, tc);
        texMinX = tc[0];
        texMinY = tc[1];
        texMaxX = tc[2];
        texMaxY = tc[3];

        /* The original surface is no longer needed. */
        SDL_FreeSurface(bmp);

        /* Bail out if the texture conversion failed. */
        if ( global_texture == 0 ) {
            return;
        }
    }

    /* Advance the position, reflecting the velocity off each edge. */
    x += delta_x;
    if ( x < 0 ) {
        x = 0;
        delta_x = -delta_x;
    } else if ( (x + w) > screen->w ) {
        x = screen->w - w;
        delta_x = -delta_x;
    }
    y += delta_y;
    if ( y < 0 ) {
        y = 0;
        delta_y = -delta_y;
    } else if ( (y + h) > screen->h ) {
        y = screen->h - h;
        delta_y = -delta_y;
    }

    /* Draw the textured quad in 2D screen coordinates. */
    SDL_GL_Enter2DMode();
    glBindTexture(GL_TEXTURE_2D, global_texture);
    glBegin(GL_TRIANGLE_STRIP);
    glTexCoord2f(texMinX, texMinY); glVertex2i(x,     y    );
    glTexCoord2f(texMaxX, texMinY); glVertex2i(x + w, y    );
    glTexCoord2f(texMinX, texMaxY); glVertex2i(x,     y + h);
    glTexCoord2f(texMaxX, texMaxY); glVertex2i(x + w, y + h);
    glEnd();
    SDL_GL_Leave2DMode();
}
/* This code is deprecated, but available for speed comparisons */
void DrawLogoBlit(void)
{
static int x = 0;
static int y = 0;
static int w, h;
static int delta_x = 1;
static int delta_y = 1;
/* Load the image (could use SDL_image library here) */
temp = SDL_LoadBMP(LOGO_FILE);
if ( temp == NULL ) {
return;
}
w = temp->w;
h = temp->h;
/* Convert the image into the screen format */
global_image = SDL_CreateRGBSurface(
SDL_SWSURFACE,
w, h,
screen->format->BitsPerPixel,
screen->format->Rmask,
screen->format->Gmask,
screen->format->Bmask,
screen->format->Amask);
if ( global_image ) {
SDL_BlitSurface(temp, NULL, global_image, NULL);
}
SDL_FreeSurface(temp);
/* Make sure that the texture conversion is okay */
if ( ! global_image ) {
return;
}
}
/* Move the image around
Note that we do not clear the old position. This is because we
perform a glClear() which clears the framebuffer and then only
update the new area.
Note that you can also achieve interesting effects by modifying
the screen surface alpha channel. It's set to 255 by default..
*/
x += delta_x;
if ( x < 0 ) {
x = 0;
delta_x = -delta_x;
} else
if ( (x+w) > screen->w ) {
x = screen->w-w;
delta_x = -delta_x;
}
y += delta_y;
if ( y < 0 ) {
y = 0;
delta_y = -delta_y;
} else
if ( (y+h) > screen->h ) {
y = screen->h-h;
delta_y = -delta_y;
}
dst.x = x;
dst.y = y;
dst.w = w;
dst.h = h;
SDL_BlitSurface(global_image, NULL, screen, &dst);
/* Show the image on the screen */
SDL_UpdateRects(screen, 1, &dst);
}
/*
 * RunGLTest -- set up an SDL/OpenGL video mode and run the demo loop,
 * optionally drawing the 2D logo and logo-cursor overlays each frame.
 *
 * NOTE(review): this copy appears to be a garbled forum paste.  The body
 * jumps from video-mode setup straight into per-frame drawing; the
 * `while ( !done )` render-loop header (and the SDL_SetVideoMode /
 * glGetString setup between them) is missing, leaving the `}` after the
 * event-polling loop unmatched, and the function never closes before
 * main() begins.  `rgb_size`, `value`, `color`, `cube`, `start_time` and
 * most parameters are presumably used by the missing portions --
 * reconstruct from the original file before compiling.
 */
int RunGLTest( int argc, char* argv[],
int logo, int logocursor, int slowly, int bpp, float gamma, int noframe, int fsaa, int sync, int accel )
{
int i;
int rgb_size[3];
int w = 640;
int h = 480;
int done = 0;
int frames;
Uint32 start_time, this_time;
/* Per-vertex colours for the 8 cube corners. */
float color[8][3]= {{ 1.0, 1.0, 0.0},
{ 1.0, 0.0, 0.0},
{ 0.0, 0.0, 0.0},
{ 0.0, 1.0, 0.0},
{ 0.0, 1.0, 1.0},
{ 1.0, 1.0, 1.0},
{ 1.0, 0.0, 1.0},
{ 0.0, 0.0, 1.0}};
/* Unit cube centred on the origin. */
float cube[8][3]= {{ 0.5, 0.5, -0.5},
{ 0.5, -0.5, -0.5},
{-0.5, -0.5, -0.5},
{-0.5, 0.5, -0.5},
{-0.5, 0.5, 0.5},
{ 0.5, 0.5, 0.5},
{ 0.5, -0.5, 0.5},
{-0.5, -0.5, 0.5}};
Uint32 video_flags;
int value;
/* See if we should detect the display depth */
if ( bpp == 0 ) {
if ( SDL_GetVideoInfo()->vfmt->BitsPerPixel <= 8 ) {
bpp = 8;
} else {
bpp = 16; /* More doesn't seem to work */
}
}
/* Set the flags we want to use for setting the video mode */
if ( logo && USE_DEPRECATED_OPENGLBLIT ) {
video_flags = SDL_OPENGLBLIT;
} else {
video_flags = SDL_OPENGL;
}
/* NOTE(review): this loop tests the pointer `argv`, which never becomes
   NULL, so it never terminates -- both the condition and the strcmp()
   argument almost certainly should read `argv[i]`. */
for ( i=1; argv; ++i ) {
if ( strcmp(argv, "-fullscreen") == 0 ) {
video_flags |= SDL_FULLSCREEN;
}
}
/* Draw 2D logo onto the 3D display */
if ( logo ) {
if ( USE_DEPRECATED_OPENGLBLIT ) {
DrawLogoBlit();
} else {
DrawLogoTexture();
}
}
if ( logocursor ) {
DrawLogoCursor();
}
SDL_GL_SwapBuffers( );
/* Check for error conditions. */
gl_error = glGetError( );
/* Allow the user to see what's happening */
if ( slowly ) {
SDL_Delay( 20 );
}
/* Check if there's a pending event. */
while( SDL_PollEvent( &event ) ) {
done = HandleEvent(&event);
}
++frames;
/* NOTE(review): this closing brace has no matching `while ( !done ) {`
   above -- the render-loop header was lost in the paste. */
}
/* Print out the frames per second */
this_time = SDL_GetTicks();
if ( this_time != start_time ) {
printf("%2.2f FPS\n",
((float)frames/(this_time-start_time))*1000.0);
}
/*
 * Program entry point: declares the option variables for the GL test.
 * NOTE(review): truncated in this paste -- the command-line parsing loop
 * and the call to RunGLTest() that follow these declarations are missing,
 * and `logo`, `slowly` and `numtests` are not yet initialized here.
 */
int main(int argc, char *argv[])
{
int i, logo, logocursor = 0;
int numtests;
int bpp = 0;
int slowly;
float gamma = 0.0;
int noframe = 0;
int fsaa = 0;
int accel = 0;
int sync = 0;
What looks possible here is that you're requesting default modes that may not be available in hardware-accelerated versions. 16-bit modes are really really old these days (the last video card that didn't support 32-bit colour was the Voodoo 3) and there should be no reason to go looking for one - try bumping RGB to 8/8/8 and see what that gets you.
Direct3D has need of instancing, but we do not. We have plenty of glVertexAttrib calls.
the most likely cause is that you don't have a driver from nvidia. Go on the nvidia website, download and install your driver and enjoy hardware accelerated OpenGL.
I think i have spotted the problem.
In the official opengl documentation site ive found this http://www.opengl.org/archives/resources/faq/technical/mswindows.htm specially the point "5.030 How do I enable and disable hardware rendering on a Wintel card?"
It says that hardware acceleration depends on choosing a right PIXEL FORMAT. Now is it possible to do this when you are using GLUT? Which config do i need when using SDL to get hardware acceleration?
the most likely cause is that you don't have a driver from nvidia. Go on the nvidia website, download and install your driver and enjoy hardware accelerated OpenGL.
Hi Kunos,
the thing is, I already have a driver from nvidia installed. And if I run code that creates a rendering context for Windows directly, it runs on the GPU! The problem has to be with choosing the right pixel format. But I'll try downloading the latest drivers from nvidia anyway to check.
Both GLUT and SDL should automatically prefer to give you a hardware accelerated pixel format if one is available.
Looking at http://sdl.beuc.net/sdl.wiki/SDL_GLattr I see that SDL_GL_SetAttribute has an SDL_GL_ACCELERATED_VISUAL attribute available, so you can try using that.
Direct3D has need of instancing, but we do not. We have plenty of glVertexAttrib calls.
Looking at http://sdl.beuc.net/...wiki/SDL_GLattr I see that SDL_GL_SetAttribute has an SDL_GL_ACCELERATED_VISUAL attribute available, so you can try using that.
Already tried, nothing new . But the strangest thing is the output:
Screen BPP: 24
Vendor : Microsoft Corporation
Renderer : GDI Generic
Version : 1.1.0
Extensions : GL_WIN_swap_hint GL_EXT_bgra GL_E
you said that lesson 1 of the nehe tutorials does give you an hardware accelerated context.
looking at the tutorial, it seems to specify 16 BPP(doesn't specify bits per color component), and a 16 bit z-buffer.
don't know if that will help, but perhaps we should step back and see why that works, but sdl does not.
Yesterday I was playing a bit with the code that creates an accelerated rendering context, changing the SUPPORT_OPENGL feature as well as the pixel format to PDF_FORMAT_GENERIC, and I still wasn't getting HW ACCELERATION; I'll keep messing with it until I find a way to disable hardware acceleration.
That may be a way to investigate yes.
I've attached a program I wrote to test your video settings; tell me if it still gives you a generic rendering context.