globalhost

Render to texture via the framebuffer object


I initialize a framebuffer object. Then, in the render loop, I render the scene to a texture, run it through a post-processing shader, and output the result to the screen. On my PC everything is OK (Radeon HD 7870). On another PC (GeForce FX 5200) glCheckFramebufferStatusEXT returns error "8cdd" and I get a black screen with a framerate of 0-1 fps.

Source code:

#include "main.hpp"

GLuint fbo, fbo_texture, rbo_depth; /* note: rbo_depth is only deleted at the end, never created in this snippet */
GLuint vbo_fbo_vertices;
GLuint program_postproc;
GLint attribute_v_coord_postproc, uniform_fbo_texture; /* locations are GLint, so the == -1 checks below are well defined */
GLuint vs, fs;
Shader shader;

int main(void)
{
    init();

    glGenFramebuffersEXT(1, &fbo);
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo);
    glEnable(GL_TEXTURE_2D);
    glGenTextures(1, &fbo_texture);
    glBindTexture(GL_TEXTURE_2D, fbo_texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_width, m_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, fbo_texture, 0);

    GLenum status;
    if ((status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT)) != GL_FRAMEBUFFER_COMPLETE_EXT) {
        fprintf(stderr, "glCheckFramebufferStatus: error 0x%x\n", status);
    }
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);

    GLfloat fbo_vertices[] = { -1, -1,   1, -1,   -1, 1,   1, 1 };
    glGenBuffers(1, &vbo_fbo_vertices);
    glBindBuffer(GL_ARRAY_BUFFER, vbo_fbo_vertices);
    glBufferData(GL_ARRAY_BUFFER, sizeof(fbo_vertices), fbo_vertices, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    shader.load("shaders/post_processing.vert", "shaders/post_processing.frag");

    attribute_v_coord_postproc = glGetAttribLocation(shader.program(), "v_coord");
    if (attribute_v_coord_postproc == -1) {
        fprintf(stderr, "Could not bind attribute %s\n", "v_coord");
        return 0;
    }
    uniform_fbo_texture = glGetUniformLocation(shader.program(), "fbo_texture");
    if (uniform_fbo_texture == -1) {
        fprintf(stderr, "Could not bind uniform %s\n", "fbo_texture");
        return 0;
    }

    while (!glfwWindowShouldClose(m_window))
    {
        glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo);
        glClear(GL_COLOR_BUFFER_BIT);
        glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
        glClear(GL_COLOR_BUFFER_BIT);
        shader.use();
        glBindTexture(GL_TEXTURE_2D, fbo_texture);
        glUniform1i(uniform_fbo_texture, /*texture unit*/ 0);
        glEnableVertexAttribArray(attribute_v_coord_postproc);
        glBindBuffer(GL_ARRAY_BUFFER, vbo_fbo_vertices);
        glVertexAttribPointer(attribute_v_coord_postproc, 2, GL_FLOAT, GL_FALSE, 0, 0);
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
        glDisableVertexAttribArray(attribute_v_coord_postproc);
        glfwSwapBuffers(m_window);
        glfwPollEvents();
    }

    glDeleteRenderbuffersEXT(1, &rbo_depth);
    glDeleteTextures(1, &fbo_texture);
    glDeleteFramebuffersEXT(1, &fbo);
    glDeleteBuffers(1, &vbo_fbo_vertices);
    glDeleteProgram(shader.program());
    glfwDestroyWindow(m_window);
    glfwTerminate();
    exit(EXIT_SUCCESS);
}

void callbackError(int error, const char* description)
{
    fputs(description, stderr);
}

void callbackKey(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
        glfwSetWindowShouldClose(window, GL_TRUE);
}

void callbackFramebufferSize(GLFWwindow* window, int width, int height)
{
    m_width = width; m_height = height;
    glBindTexture(GL_TEXTURE_2D, fbo_texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_width, m_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
    glBindTexture(GL_TEXTURE_2D, 0);
}

void init()
{
    glfwSetErrorCallback(callbackError);
    if (!glfwInit()) exit(EXIT_FAILURE);
    m_width = 800; m_height = 600;
    m_window = glfwCreateWindow(m_width, m_height, "Framebuffer Test", NULL, NULL);
    if (!m_window) { glfwTerminate(); exit(EXIT_FAILURE); }
    glfwMakeContextCurrent(m_window);
    glfwSwapInterval(0);
    glfwSetKeyCallback(m_window, callbackKey);
    glfwSetFramebufferSizeCallback(m_window, callbackFramebufferSize);
    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK) std::cout << "GLEW Init Error" << std::endl;
    glClearColor(0.2, 0.3, 0.4, 1.0);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
}

 


The GeForce FX 5200 is from an era where OpenGL specs were really fuzzy, drivers sucked, and GLSL support was really, really bad. In fact NVIDIA back then went for their own custom profiles via the Cg compiler (vp10 through vp40 for vertex shaders and fp10 through fp40 for pixel shaders; vp40 and fp40 were roughly the equivalents of Shader Model 3.0, and btw the GeForce FX 5200 only supported Shader Model 2.0).
 
It's possible you're doing something incompatible with that card, or that you did nothing wrong at all and it's just a driver bug (oh boy, there were a ton back then).
I wouldn't even bother with that card. If you really want to support it, you're in for a world of pain (I'm still mentally scarred from that era).


0x8cdd = GL_FRAMEBUFFER_UNSUPPORTED_EXT

"The combination of internal formats of the attached images does violate an implementation-dependent set of restrictions."
 

The issue with the non-core version of the framebuffer extension is that there are no formats guaranteed to be supported as a render target. So in theory a driver could "support" GL_EXT_framebuffer_object but not support a single renderable texture format.

 

So in your case you could try different formats than the unsized GL_RGBA type, and you should probably also use texture dimensions that are a power of two.
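
For example, something like this (just a sketch of that suggestion, swapping in the sized GL_RGBA8 format and a fixed 512x512 size in place of the unsized GL_RGBA and the window dimensions):

/* Sketch: sized internal format + power-of-two dimensions for the FBO color texture. */
glBindTexture(GL_TEXTURE_2D, fbo_texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, 512, 512, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, fbo_texture, 0);

/* If glCheckFramebufferStatusEXT still reports GL_FRAMEBUFFER_UNSUPPORTED_EXT,
 * keep probing other sized formats (GL_RGB8, GL_RGB5_A1, ...) until one completes. */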


you should probably also use texture dimensions that are a power of two.

 

Yes!

 

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_width, m_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);

 

Replaced by:

 

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 512, 512, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);

 

And now it works! Thank you! 
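
A refinement I might try later (just a sketch, assuming GLEW is already initialized): keep the window-sized texture where NPOT render targets are supported and only fall back to 512x512 otherwise, by branching on the ARB_texture_non_power_of_two extension flag:

/* Sketch: fall back to a power-of-two size only when NPOT textures aren't supported. */
int tex_w = 512, tex_h = 512;                /* safe power-of-two fallback */
if (GLEW_ARB_texture_non_power_of_two) {     /* extension flag exposed by GLEW */
    tex_w = m_width;                         /* NPOT is fine, match the window size */
    tex_h = m_height;
}
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex_w, tex_h, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);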
