SOLVED: glCheckFramebufferStatusEXT returns 0

Started by BradDaBug; last reply by BradDaBug 16 years, 1 month ago
I'm trying to get FBOs to work, but I can't. Here's a small demo that reproduces the problem (for me at least):
#include <iostream>
#include <cassert>
#include "glew/glew.h"
#include "SDL/SDL.h"

#define LogText(c)

int g_screenWidth, g_screenHeight;

void setupRTT(int width, int height)
{
	assert(glewIsSupported("GL_EXT_framebuffer_object") == true);

	glGetError();

	GLuint fbo;
	glGenFramebuffersEXT(1, &fbo);
	glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo);
	assert(glGetError() == GL_NO_ERROR);
	
	GLuint tex;
	glGenTextures(1, &tex);
	glBindTexture(GL_TEXTURE_2D, tex);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
	glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, tex, 0);

	GLuint depth;
	glGenRenderbuffersEXT(1, &depth);
	glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, depth);
	glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT, width, height);
	glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, depth);

	assert(glGetError() == GL_NO_ERROR);
	GLenum fboStatus = glCheckFramebufferStatusEXT(fbo);
	GLenum err = glGetError();
	assert(err == GL_NO_ERROR);
	assert(fboStatus == GL_FRAMEBUFFER_COMPLETE_EXT);
}

void SetVideoMode(int width, int height, int bitsPerPixel, bool fullscreen)
{
	LogText("Setting video mode...");
	
	if (bitsPerPixel == 16)
	{
		SDL_GL_SetAttribute( SDL_GL_RED_SIZE, 5 );
		SDL_GL_SetAttribute( SDL_GL_GREEN_SIZE, 6 );
		SDL_GL_SetAttribute( SDL_GL_BLUE_SIZE, 5 );
	}
	else if (bitsPerPixel == 32)
	{
		SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
		SDL_GL_SetAttribute( SDL_GL_GREEN_SIZE, 8 );
		SDL_GL_SetAttribute( SDL_GL_BLUE_SIZE, 8 );
	}

	SDL_GL_SetAttribute( SDL_GL_DEPTH_SIZE, 16 );
	SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );

	unsigned int flags = 0;
		
	if (fullscreen == true)
		flags = SDL_FULLSCREEN;

	assert(SDL_SetVideoMode(width, height, bitsPerPixel, flags | SDL_OPENGL) != NULL);
	
	g_screenWidth = width;
	g_screenHeight = height;

	glewInit();

	setupRTT(256, 256);
}

int main(int argc, char** argv)
{
	SDL_Init(SDL_INIT_VIDEO | SDL_INIT_NOPARACHUTE);

	SetVideoMode(640, 480, 16, false);

	return 0;
}

In setupRTT() the call to glCheckFramebufferStatusEXT() returns 0, and the glGetError() right after it returns GL_INVALID_ENUM. If I comment out all the stuff about the texture and depth buffer and just call glCheckFramebufferStatusEXT() immediately after the call to glBindFramebufferEXT() I get exactly the same results. I'm stumped. Anyone have any idea what's going on? FIXED: The problem was I wasn't giving glCheckFramebufferStatusEXT() GL_FRAMEBUFFER_EXT as a parameter. Suddenly the GL_INVALID_ENUM makes perfect sense. Figures it was something stupid like that. [Edited by - BradDaBug on March 23, 2008 12:18:41 AM]
I like the DARK layout!

This topic is closed to new replies.

Advertisement