• Advertisement
Sign in to follow this  

[SDL/OGL] Invalid Enum?

This topic is 3951 days old which is more than the 365 day threshold we allow for new replies. Please post a new topic.

If you intended to correct an error in the post then please contact us.

Recommended Posts

I'm getting the GL_INVALID_ENUM error in this code, and I have no idea what it means.
/// Load an image file from disk into a new OpenGL texture.
///
/// @param filename  path to the image file (any format SDL_image's IMG_Load supports)
/// @return a new, caller-owned Image wrapping the texture, or NULL on load failure.
///         GL upload errors are logged but still return the (possibly blank) Image.
Image *VideoDriver::loadImage( std::string filename )
{
	SDL_Surface *tmp = IMG_Load( filename.c_str() );
	GLuint texture;
	
	if(tmp == NULL)
	{
		logger->write( "Failed to load image: %s", filename.c_str() );
		// BUG FIX: this function returns Image*, so `return false;` relied on a
		// misleading bool->pointer conversion. Return NULL explicitly.
		return NULL;
	}

	// Pre-GL2 hardware requires power-of-two texture dimensions; warn so the
	// asset can be fixed instead of silently failing on older cards.
	if( ( tmp->w & (tmp->w - 1)) != 0 )
	{
		logger->write( "warning: %s's width is not a power of 2.", filename.c_str() );
	}

	if( (tmp->h & (tmp->h -1)) != 0 )
	{
		logger->write( "warning: %s's height is not a power of 2.", filename.c_str() );
	}

	// Flush any stale GL errors raised by earlier, unrelated calls so the
	// check at the bottom reports only errors from this function.
	while( glGetError() != GL_NO_ERROR )
		;

	glGenTextures( 1, &texture );
	glBindTexture( GL_TEXTURE_2D, texture );

	// BUG FIX: the pixels were always uploaded as GL_RGBA, but IMG_Load often
	// returns 24-bit (3 bytes/pixel) surfaces; reading those as RGBA walks off
	// the end of each pixel and garbles the texture. Choose the format from the
	// surface's actual bytes-per-pixel. (Assumes the surface's byte order
	// matches GL_RGB/GL_RGBA — check tmp->format->Rmask if colors look swapped.)
	GLenum format = ( tmp->format->BytesPerPixel == 4 ) ? GL_RGBA : GL_RGB;

	// Use the symbolic internal format rather than the legacy numeric `4`
	// (component count), which newer GL versions reject with GL_INVALID_ENUM.
	glTexImage2D( GL_TEXTURE_2D, 0, format, tmp->w, tmp->h, 0, format, GL_UNSIGNED_BYTE, tmp->pixels );

	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST );
	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST );

	Image *img = new Image( texture );

	// The pixel data has been copied into the texture; the surface can go.
	SDL_FreeSurface( tmp );

    GLenum error = glGetError();
    if (error)
    {
        std::string errmsg = "Unknown error";
        switch (error)
        {
            case GL_INVALID_ENUM:
                errmsg = "GL_INVALID_ENUM";
                break;
            case GL_INVALID_VALUE:
                errmsg = "GL_INVALID_VALUE";
                break;
            case GL_INVALID_OPERATION:
                errmsg = "GL_INVALID_OPERATION";
                break;
            case GL_STACK_OVERFLOW:
                errmsg = "GL_STACK_OVERFLOW";
                break;
            case GL_STACK_UNDERFLOW:
                errmsg = "GL_STACK_UNDERFLOW";
                break;
            case GL_OUT_OF_MEMORY:
                errmsg = "GL_OUT_OF_MEMORY";
                break;
        }
        logger->write("Error: Image GL import failed: %s", errmsg.c_str());
	}

	return img;
}

Share this post


Link to post
Share on other sites
Advertisement
The first step I'd take would be to put error checks after all GL calls. This'll narrow it down to the specific call, making it much easier to check the parameters used. If doing so still gets you nowhere, let us know exactly which call is the culprit.

Share this post


Link to post
Share on other sites
Ok I found it, but for some reason my image won't draw. loadImage is the same as above

drawImage

/// Draw the sub-rectangle `src` of `image` onto the screen at `dest`.
/// The quad is sized from src.width/src.height (dest.width/height are unused).
/// Does nothing when `image` is NULL.
void VideoDriver::drawImage( Image *image, Rect &src, Rect &dest )
{
	if(image == NULL)
		return;

	// Map the source rectangle into normalized [0,1] texture space.
	const float u0 = src.x / (float)image->getWidth();
	const float v0 = src.y / (float)image->getHeight();
	const float u1 = (src.x + src.width) / (float)image->getWidth();
	const float v1 = (src.y + src.height) / (float)image->getHeight();

	glBindTexture( GL_TEXTURE_2D, image->getTexture() );
	glEnable( GL_TEXTURE_2D );

	// Immediate-mode quad: texcoord precedes each vertex it applies to.
	glBegin( GL_QUADS );
	glTexCoord2f( u0, v0 );  glVertex3f( dest.x,             dest.y,              0 );
	glTexCoord2f( u1, v0 );  glVertex3f( dest.x + src.width, dest.y,              0 );
	glTexCoord2f( u1, v1 );  glVertex3f( dest.x + src.width, dest.y + src.height, 0 );
	glTexCoord2f( u0, v1 );  glVertex3f( dest.x,             dest.y + src.height, 0 );
	glEnd();

	glDisable( GL_TEXTURE_2D );
}


Other stuff that might be of importance

/// Initialize SDL video and set up a 2D orthographic OpenGL context.
///
/// @param width/height  desired resolution in pixels
/// @param fullscreen    request a fullscreen video mode when true
/// @return true on success, false if SDL init or the mode set fails
bool VideoDriver::setVideoMode( int width, int height, bool fullscreen )
{
mWidth = width;
mHeight = height;

if(SDL_Init( SDL_INIT_VIDEO ) != 0)
{
logger->write( "Unable to start SDL" );
return false;
}

// Text input / window setup (SDL 1.2 APIs).
SDL_EnableUNICODE(1);
SDL_EnableKeyRepeat(SDL_DEFAULT_REPEAT_DELAY, SDL_DEFAULT_REPEAT_INTERVAL);
SDL_WM_SetCaption("Divulge Online", NULL);

logger->write( "Setting video mode: %dx%dx32 %s", mWidth, mHeight, fullscreen ? "fullscreen" : "windowed" );

// SDL Video Flags
Uint32 videoFlags = SDL_ANYFORMAT | SDL_OPENGL;

if(fullscreen)
videoFlags |= SDL_FULLSCREEN;

// We want double buffering
// (GL attributes must be set BEFORE SDL_SetVideoMode to take effect.)
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );

mScreen = SDL_SetVideoMode( mWidth, mHeight, 32, videoFlags );

if(!mScreen)
{
logger->write( "Unable to set video mode." );
return false;
}

// Color to clear to
glClearColor( 0, 0, 0, 0 );

glViewport( 0, 0, mWidth, mHeight );
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);

//Set OpenGL State
glMatrixMode( GL_MODELVIEW );
glPushMatrix();
glLoadIdentity();

// NOTE(review): the modelview matrix is pushed twice here (and once more for
// projection below) with no matching glPopMatrix; harmless for one-time init,
// but repeated calls would eventually overflow the matrix stacks — confirm
// this is only called once.
glEnable( GL_TEXTURE_2D );
glPushMatrix();
glLoadIdentity();

glMatrixMode( GL_PROJECTION );
glPushMatrix();
glLoadIdentity();

glEnable( GL_BLEND );

// Top-left origin 2D projection: y grows downward (screen coordinates).
glOrtho( 0, mWidth, mHeight, 0, -1, 1 );

glEnable( GL_SCISSOR_TEST );

// Standard alpha blending for textures with transparency.
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

return true;
}

/// Begin a frame: clear the color buffer (no depth buffer is used in this 2D renderer).
void VideoDriver::beginScene()
{
// Clear the screen
glClear( GL_COLOR_BUFFER_BIT );
}

/// End a frame: present the back buffer (requires SDL_GL_DOUBLEBUFFER, set in setVideoMode).
void VideoDriver::endScene()
{

SDL_GL_SwapBuffers();
}



Share this post


Link to post
Share on other sites
When you say your image doesn't draw, do you mean that the texture doesn't show up (the quad is drawn in white or some other solid color), the quad doesn't show up, or something else? What differs if you don't try to texture it?

One thing that stands out is the method of coming up with texture coordinates. Are you sure they're within 0.0 and 1.0? If not, your results could be wonky. You may want to hard code coordinates you know should work and see if that fixes things; it's usually customary to use {(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)} for texturing quads, anyway.

Share this post


Link to post
Share on other sites
Well..found the problem. I wasn't setting the images width/height in the new Image class. Thanks for your help.

Share this post


Link to post
Share on other sites
Sign in to follow this  

  • Advertisement