Seriously don't understand WHY my framebuffer isn't rendering as a texture

Ok so I've looked at several tutorials and checked my code but something VERY simple must be eluding me here. I render a rotating teapot to a texture and then try and render this texture as a full screen quad, just to get used to framebuffers before doing anything fancy with them.

I have tested rendering the teapot straight to the screen, and that was ok. I have also tested my quad shading code by making the screen yellow in a fragment shader, and that was ok too. However, when I try to render to the texture I just don't get the results I want! I get NOTHING!

I have a framebuffer struct and a helper function to set up an FBO as required:


typedef struct
{
    GLubyte * imageData;
    GLuint bpp;
    GLuint width;
    GLuint height;
    GLuint texID;
    GLuint type;
} Texture;

typedef struct
{
    GLuint handle;
    GLuint depthBuffer;
    GLint depthHeight;
    GLint depthWidth;
    Texture textureData;
    Texture nonTexture;
} FBO;

void FrameBufferObject(FBO& fboData, GLenum minFilter, GLenum magFilter)
{
    glGenFramebuffers(1, &fboData.handle);
    glBindFramebuffer(GL_FRAMEBUFFER, fboData.handle);

    glGenTextures(1, &fboData.textureData.texID);
    glBindTexture(GL_TEXTURE_2D, fboData.textureData.texID);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, fboData.depthWidth, fboData.depthHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glGenRenderbuffers(1, &fboData.depthBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, fboData.depthBuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, fboData.depthWidth, fboData.depthHeight);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, fboData.depthBuffer);

    glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, fboData.textureData.texID, 0);
    GLenum drawBufs[] = {GL_COLOR_ATTACHMENT0};
    glDrawBuffers(1, drawBufs);

    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
        std::cout << "problem with framebuffer!";
    else
        std::cout << "framebuffer ok! :)";

    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
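For reference, the call site at init time looks roughly like this (simplified sketch; the 512x512 size matches the viewport I use for the FBO pass below):

// Simplified init-time call (sketch): the width/height fields must be set
// before FrameBufferObject() runs, since glTexImage2D and
// glRenderbufferStorage read them.
FBO frameBuf;                  // global in my real code
frameBuf.depthWidth  = 512;
frameBuf.depthHeight = 512;
FrameBufferObject(frameBuf, GL_LINEAR, GL_LINEAR);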




My shader code is as follows:

//texture.vp
#version 330

in vec3 position;

out vec2 UV;

void main()
{
    gl_Position = vec4(position, 1);
    UV = (position.xy + vec2(1, 1)) / 2.0;
}

//texture.fp
#version 330

in vec2 UV;

out vec3 color;

uniform sampler2D renderedTexture;

void main()
{
    color = texture(renderedTexture, UV).rgb;
}



I call my rendering code as follows...


void RenderToTexture()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glUseProgram(renderProgram);
    glUniformMatrix4fv(MVPMatrixUniform, 1, GL_FALSE, &MVPMatrix[0][0]);

    glEnableVertexAttribArray(0);

    // set variables for the fbo
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuf.handle);
    glViewport(0, 0, 512, 512);

    GLenum drawBufs[] = {GL_COLOR_ATTACHMENT0};
    glDrawBuffers(1, drawBufs);
    teapot->render();
    glUseProgram(0);
    glDisableVertexAttribArray(0);
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}

void DrawTextureToQuad()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glViewport(0, 0, 640, 480);

    glUseProgram(textureProgram);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, frameBuf.textureData.texID);

    GLuint texID = glGetUniformLocation(textureProgram, "renderedTexture");
    glUniform1i(texID, 0);
    glEnableVertexAttribArray(0);

    // Render to the screen
    glBindBuffer(GL_ARRAY_BUFFER, quad_vertexbuffer);
    glVertexAttribPointer(
        0,          // attribute 0. No particular reason for 0, but must match the layout in the shader.
        3,          // size
        GL_FLOAT,   // type
        GL_FALSE,   // normalized?
        0,          // stride
        (void*)0    // array buffer offset
    );
    // Draw the quad!
    glDrawArrays(GL_TRIANGLES, 0, 6); // 6 vertices starting at 0 -> 2 triangles

    glDisableVertexAttribArray(0);
    glUseProgram(0);
}

void DrawGLScene(GLvoid)
{
    MVPMatrix = perspMatrix * camMatrix * transformMatrix;
    Update();
    RenderToTexture();
    DrawTextureToQuad();

    glutSwapBuffers();
    glutPostRedisplay();
}


If anyone can help, I'd be really grateful!
Can you load a different texture first and verify that the shader will texture it properly? You said you tried making everything yellow, but did you try using a texture that's not the framebuffer's? Something like the quick test sketched below would tell you. Everything else looks fine.
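For example (just a sketch, not from your code; testTex and the red buffer are throwaway names): upload a solid red 2x2 texture and bind that in place of the FBO texture before drawing the fullscreen quad. If the quad shows up red, the quad/shader side is fine and the problem is on the FBO side.

// Quick sanity check (sketch): upload a solid red 2x2 texture and bind it
// in place of the FBO color texture before drawing the fullscreen quad.
GLuint testTex;
GLubyte red[2 * 2 * 4];
for (int i = 0; i < 2 * 2; ++i)
{
    red[i * 4 + 0] = 255;  // R
    red[i * 4 + 1] = 0;    // G
    red[i * 4 + 2] = 0;    // B
    red[i * 4 + 3] = 255;  // A
}
glGenTextures(1, &testTex);
glBindTexture(GL_TEXTURE_2D, testTex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 2, 2, 0, GL_RGBA, GL_UNSIGNED_BYTE, red);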


Hey, thanks. Yeah I was easily able to render another texture to the screen. I really don't know what I'm doing wrong....
So when you did the textured quad test, were you also drawing to the FBO?


The only thing I saw that's different from my code is the attach call: you use glFramebufferTexture, whereas I use glFramebufferTexture2D (see the snippet below).
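For your setup that would look roughly like this, as a drop-in for the glFramebufferTexture line in your helper (sketch, using the texID from your struct):

// Attach the color texture with the 2D-specific call; the extra
// GL_TEXTURE_2D parameter names the texture target explicitly.
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                       GL_TEXTURE_2D, fboData.textureData.texID, 0);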


Thanks again. I tried changing it to glFramebufferTexture2D, and in desperate hacking I even tried changing some flags from GL_FRAMEBUFFER to GL_DRAW_FRAMEBUFFER, but no luck at all. I really don't understand what's up :(
If you can use immediate mode, I would strip everything down to just making an FBO, drawing a triangle into the FBO, and then drawing another triangle with the FBO's texture applied. Get rid of the shaders and everything else. Sometimes you just have to rewrite things and remove unrelated code.


You need to unbind the texture that the framebuffer renders into before you bind the framebuffer. As far as I can see at the moment (I'm on my phone), you never unbind the texture that the framebuffer uses. Something along these lines:
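A sketch of the order I mean, using the names from your RenderToTexture:

// Make sure the FBO's color texture is not bound to a texture unit while
// rendering into the FBO (sketch of the suggested order).
glBindTexture(GL_TEXTURE_2D, 0);                     // unbind the FBO's texture first
glBindFramebuffer(GL_FRAMEBUFFER, frameBuf.handle);  // now render into the FBO
teapot->render();
glBindFramebuffer(GL_FRAMEBUFFER, 0);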
Hi,

Here's a tiny little program that does FBOs; hope this will help you. (The result you should see is a white triangle.)


#include <iostream>

#ifdef _WIN32
#define WIN32_LEAN_AND_MEAN
#include <Windows.h>
#undef near
#undef far
#endif

#include "GL/glew.h" //the mighty GLEW :)
#include "SFML/Window.hpp"

/*
* Global variables
*/

sf::Window the_window;
sf::Event the_event;

float fps = 1.0f;
int frames = 0;
sf::Clock the_clock;
std::string app_path;
#define SCREEN_WIDTH 800
#define SCREEN_HEIGHT 600

GLuint tex;

GLuint fbo;

/*
* Function declarations
*/

void get_opengl_error( bool ignore = false );

void draw_quad()
{
    glBegin( GL_QUADS );
    glTexCoord2f( 0, 0 );
    glVertex2f( 0, 0 );
    glTexCoord2f( 1, 0 );
    glVertex2f( 1, 0 );
    glTexCoord2f( 1, 1 );
    glVertex2f( 1, 1 );
    glTexCoord2f( 0, 1 );
    glVertex2f( 0, 1 );
    glEnd();
}

void check_fbo()
{
    if( glCheckFramebufferStatus( GL_FRAMEBUFFER ) != GL_FRAMEBUFFER_COMPLETE )
    {
        std::cerr << "FBO not complete.\n";
        the_window.close();
        exit( 1 );
    }
}


int main( int argc, char* args[] )
{
    /*
     * Initialize OpenGL context
     */

    the_window.create( sf::VideoMode( SCREEN_WIDTH, SCREEN_HEIGHT, 32 ), "FBO", sf::Style::Default );

    if( !the_window.isOpen() )
    {
        std::cerr << "Couldn't initialize SFML.\n";
        the_window.close();
        exit( 1 );
    }

    GLenum glew_error = glewInit();

    if( glew_error != GLEW_OK )
    {
        std::cerr << "Error initializing GLEW: " << glewGetErrorString( glew_error ) << "\n";
        the_window.close();
        exit( 1 );
    }

    if( !GLEW_VERSION_3_3 )
    {
        std::cerr << "Error: OpenGL 3.3 is required\n";
        the_window.close();
        exit( 1 );
    }

    /*
     * Initialize and load textures
     */

    glEnable( GL_TEXTURE_2D );

    glGenTextures( 1, &tex );
    glBindTexture( GL_TEXTURE_2D, tex );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
    glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA8, SCREEN_WIDTH, SCREEN_HEIGHT, 0, GL_RGBA, GL_FLOAT, 0 );

    get_opengl_error();

    /*
     * Initialize FBOs
     */

    GLenum modes[] = { GL_COLOR_ATTACHMENT0 };

    glGenFramebuffers( 1, &fbo );
    glBindFramebuffer( GL_FRAMEBUFFER, fbo );
    glDrawBuffers( 1, modes );
    glFramebufferTexture2D( GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex, 0 );

    check_fbo();

    glBindFramebuffer( GL_FRAMEBUFFER, 0 );

    glBindTexture( GL_TEXTURE_2D, 0 );

    get_opengl_error();

    /*
     * Set up matrices
     */

    glViewport( 0, 0, SCREEN_WIDTH, SCREEN_HEIGHT );
    glMatrixMode( GL_PROJECTION );
    //x_min:0, x_max:1, y_min:0, y_max:1, z_min:0, z_max:-1
    float ortho_matrix[] = { 2, 0, 0, 0,
                             0, 2, 0, 0,
                             0, 0, 2, 0,
                             -1, -1, -1, 1
                           };
    glLoadMatrixf( ortho_matrix );
    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();

    get_opengl_error();

    /*
     * Generate input
     */

    glBindFramebuffer( GL_FRAMEBUFFER, fbo );

    glClearColor( 0, 0, 0, 0 );
    glClear( GL_COLOR_BUFFER_BIT );

    glPushMatrix();
    glTranslatef( 0.5f, 0.5f, 0.0f );
    glBegin( GL_TRIANGLES );
    glColor3f( 1, 1, 1 );
    glVertex2f( -0.25f, -0.25f );
    glVertex2f( 0, 0.25f );
    glVertex2f( 0.25f, -0.25f );
    glEnd();
    glPopMatrix();

    glBindFramebuffer( GL_FRAMEBUFFER, 0 );

    get_opengl_error();

    /*
     * MAIN LOOP
     */

    the_clock.restart();

    glEnable( GL_TEXTURE_2D );

    glActiveTexture( GL_TEXTURE0 );
    glBindTexture( GL_TEXTURE_2D, tex );

    while( true )
    {
        /*
         * Handle events
         */

        while( the_window.pollEvent( the_event ) )
        {
            if( the_event.type == sf::Event::Closed )
            {
                the_window.close();
                exit( 0 );
            }
        }

        /*
         * DRAW RESULT
         */

        draw_quad();

        /*
         * Show the result
         */

        the_window.display();

        frames++;

        if( the_clock.getElapsedTime().asMilliseconds() > 1000.0f )
        {
            int timepassed = the_clock.getElapsedTime().asMilliseconds();
            fps = 1000.0f / ( ( float ) timepassed / ( float ) frames );
            std::cout << "FPS: " << fps << " Time: " << ( float ) timepassed / ( float ) frames << "\n";
            frames = 0;
            timepassed = 0;
            the_clock.restart();
        }
    }

    return 0;
}

void get_opengl_error( bool ignore )
{
    bool got_error = false;
    GLenum error = 0;
    error = glGetError();
    std::string errorstring = "";

    while( error != GL_NO_ERROR )
    {
        if( error == GL_INVALID_ENUM )
        {
            //An unacceptable value is specified for an enumerated argument. The offending command is ignored and has no other side effect than to set the error flag.
            errorstring += "OpenGL error: invalid enum...\n";
            got_error = true;
        }

        if( error == GL_INVALID_VALUE )
        {
            //A numeric argument is out of range. The offending command is ignored and has no other side effect than to set the error flag.
            errorstring += "OpenGL error: invalid value...\n";
            got_error = true;
        }

        if( error == GL_INVALID_OPERATION )
        {
            //The specified operation is not allowed in the current state. The offending command is ignored and has no other side effect than to set the error flag.
            errorstring += "OpenGL error: invalid operation...\n";
            got_error = true;
        }

        if( error == GL_STACK_OVERFLOW )
        {
            //This command would cause a stack overflow. The offending command is ignored and has no other side effect than to set the error flag.
            errorstring += "OpenGL error: stack overflow...\n";
            got_error = true;
        }

        if( error == GL_STACK_UNDERFLOW )
        {
            //This command would cause a stack underflow. The offending command is ignored and has no other side effect than to set the error flag.
            errorstring += "OpenGL error: stack underflow...\n";
            got_error = true;
        }

        if( error == GL_OUT_OF_MEMORY )
        {
            //There is not enough memory left to execute the command. The state of the GL is undefined, except for the state of the error flags, after this error is recorded.
            errorstring += "OpenGL error: out of memory...\n";
            got_error = true;
        }

        if( error == GL_TABLE_TOO_LARGE )
        {
            //The specified table exceeds the implementation's maximum supported table size. The offending command is ignored and has no other side effect than to set the error flag.
            errorstring += "OpenGL error: table too large...\n";
            got_error = true;
        }

        error = glGetError();
    }

    if( got_error && !ignore )
    {
        std::cerr << errorstring;
        the_window.close();
        return;
    }
}



You need to unbind the texture that the framebuffer renders into before you bind the framebuffer. As far as I can see at the moment (I'm on my phone), you never unbind the texture that the framebuffer uses.


I tried that in a session last night, but still no luck....

Thanks though! ;)

This topic is closed to new replies.
