glBindTexture() + glDrawArrays()

3 comments, last by zedzeek 17 years, 8 months ago
This seems to be a common problem, but nothing I've read so far has helped. Basically: I can create multiple textures, but only use one of them at a time. To load a texture I use this simple function:

unsigned int build_texture (unsigned int dx, unsigned int dy, void *data)
{
    unsigned int texture_id;

    glGenTextures (1, &texture_id);
    glBindTexture (GL_TEXTURE_2D, texture_id);
    if (glGetError() != GL_NO_ERROR)
        texture_id = 0;     /* 0 is never returned by glGenTextures, so it's a safe failure sentinel
                               (storing -1 in an unsigned int just wraps to UINT_MAX) */
    else
    {
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
        glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA, dx, dy, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
        if (glGetError() != GL_NO_ERROR)
        {
            /* Don't leak the half-built texture object on failure */
            glDeleteTextures (1, &texture_id);
            texture_id = 0;
        }
    }

    return (texture_id);
}
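
(Since glGenTextures never hands out 0 as a valid texture name, the failure path above returns 0 and the call sites can test for it directly; the img1 fields here are from my image loader:)

unsigned int tex_one;

tex_one = build_texture (img1->dx, img1->dy, img1->pixels);
if (tex_one == 0)
    printf ("failed to build texture\n");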

If I do the following:

tex_one = build_texture (img1->dx, img1->dy, img1->pixels);
tex_two = build_texture (img2->dx, img2->dy, img2->pixels);

set_array_data (model_one);
glBindTexture (GL_TEXTURE_2D, tex_one);
glDrawArrays (mode, 0, vcount);

Or:

tex_one = build_texture (img1->dx, img1->dy, img1->pixels);
tex_two = build_texture (img2->dx, img2->dy, img2->pixels);

set_array_data (model_two);
glBindTexture (GL_TEXTURE_2D, tex_two);
glDrawArrays (mode, 0, vcount);

I get the expected result. If I try to use them both:

tex_one = build_texture (img1->dx, img1->dy, img1->pixels);
tex_two = build_texture (img2->dx, img2->dy, img2->pixels);

set_array_data (model_one);
glBindTexture (GL_TEXTURE_2D, tex_one);
glDrawArrays (mode, 0, vcount);

set_array_data (model_two);
glBindTexture (GL_TEXTURE_2D, tex_two);
glDrawArrays (mode, 0, vcount);

it goes wrong: both the model_one and model_two meshes are drawn using tex_one. The usual cause of this seems to be that tex_two hasn't been initialised properly, but in that case I would expect model_one to be textured and model_two to be white (untextured), and that isn't what happens. It appears I'm loading the textures fine; it's when I come to use them that I'm somehow getting it wrong. So... any suggestions?
If the textures are OK and the usage of them is not, why did you post the texture loading code and not the texture usage code? Post the code for set_array_data too, and any other relevant code.
Well, since my meshes render correctly (just not with the right textures), I thought there wasn't much point. But I suppose there could be a problem there...

#include "include/GL.h"bool set_array_data (GL__Mesh *mesh){    bool rtn;    bool failed;    int offset;    int count;    int texture_id;    int i;    rtn = false;    failed = false;    // Set the error context for arguments    if (mesh != NULL)        Error__set_context (mesh->error, NULL, NULL, "set_array_data");    if (mesh != NULL)    {        // Check the mesh        if (!GL__Mesh_verify (mesh))            Error__set (mesh->error, "Invalid mesh (verify failed)", &failed);        // Set the arrays        if (!failed)        {            // Basic state enable / disable            glEnableClientState (GL_VERTEX_ARRAY);            glDisableClientState (GL_COLOR_ARRAY);            glDisableClientState (GL_TEXTURE_COORD_ARRAY);            // Vertex list            glVertexPointer (mesh->vertex_size, mesh->vertex_mode, 0, mesh->vertex_list);            if (glGetError() != GL_NO_ERROR)                Error__set (mesh->error, "Failed to set vertex list", &failed);            // Color list            if (!failed)            {                if (mesh->color_list_ambient != NULL)                {                    glColorPointer (mesh->color_size, mesh->color_mode, 0, mesh->color_list_ambient);                    if (glGetError() != GL_NO_ERROR)                        Error__set (mesh->error, "Failed to set color list", &failed);                    else                        glEnableClientState (GL_COLOR_ARRAY);                }            }            // Texture coordinates list            if (!failed)            {                if (mesh->tex_coord_list != NULL)                {                    glTexCoordPointer (mesh->tex_coord_size, mesh->tex_coord_mode, 0, mesh->tex_coord_list);                    if (glGetError() != GL_NO_ERROR)                        Error__set (mesh->error, "Failed to set texture coordinate list", &failed);                    else                    {                        glEnableClientState (GL_TEXTURE_COORD_ARRAY);                        // Activate textures                        glHint (GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);                        glTexEnvi (GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);                        glEnable (GL_TEXTURE_2D);                    }                }            }        }        if (!failed)            rtn = true;    }    return (rtn);}


That's all the code there is really... unless you want me to post the structures and stuff. I suppose I'll post the GL init stuff too, but it's pretty standard...

bool GL__init (GL__Window *window, GL__RENDER_TYPE render_type)
{
    bool rtn;
    bool failed;

    rtn = false;
    failed = false;

    // Set the error context for arguments
    if (window != NULL)
        Error__set_context (window->error, NULL, NULL, "GL__init");

    if (window != NULL)
    {
        if (window->rcontrol == NULL)
            Error__set (window->error, "Invalid render control for window (NULL)", &failed);
        else if (render_type == GL__RENDER_TYPE_NONE)
            Error__set (window->error, "Invalid render type (NONE)", &failed);

        // Perform default actions
        if (!failed)
        {
            glClearColor (0, 0, 0, 0);
            glViewport (0, 0, window->width, window->height);
            glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            glEnable (GL_BLEND);
        }

        // Perform 2D init
        if ((!failed) && (render_type == GL__RENDER_TYPE_2D))
        {
            window->rcontrol->render_type = GL__RENDER_TYPE_2D;
            if (!GL__set_volume (window, 0, window->width,
                                         0, window->height,
                                         GL__DEFAULT_2D_VOLUME_NEAR,
                                         GL__DEFAULT_2D_VOLUME_FAR))
                Error__set (window->error, "Unable to set rendering volume", &failed);
        }

        // Perform 3D init
        if ((!failed) && (render_type == GL__RENDER_TYPE_3D))
        {
            window->rcontrol->render_type = GL__RENDER_TYPE_3D;
            if (!GL__set_volume (window, GL__DEFAULT_3D_VOLUME_LEFT,
                                         GL__DEFAULT_3D_VOLUME_RIGHT,
                                         GL__DEFAULT_3D_VOLUME_BOTTOM,
                                         GL__DEFAULT_3D_VOLUME_TOP,
                                         GL__DEFAULT_3D_VOLUME_NEAR,
                                         GL__DEFAULT_3D_VOLUME_FAR))
                Error__set (window->error, "Unable to set rendering volume", &failed);

            // Camera Position
            if (!failed)
            {
                if (!GL__set_camera (window, GL__CAMERA_POS, GL__DEFAULT_CAMERA_X,
                                                             GL__DEFAULT_CAMERA_Y,
                                                             GL__DEFAULT_CAMERA_Z))
                    Error__set (window->error, "Unable to set camera position", &failed);
            }

            // Camera view
            if (!failed)
            {
                if (!GL__set_camera (window, GL__CAMERA_VIEW, GL__DEFAULT_CAMERA_VX,
                                                              GL__DEFAULT_CAMERA_VY,
                                                              GL__DEFAULT_CAMERA_VZ))
                    Error__set (window->error, "Unable to set camera view target", &failed);
            }

            // Camera normal
            if (!failed)
            {
                if (!GL__set_camera (window, GL__CAMERA_NORMAL, GL__DEFAULT_CAMERA_NX,
                                                                GL__DEFAULT_CAMERA_NY,
                                                                GL__DEFAULT_CAMERA_NZ))
                    Error__set (window->error, "Unable to set camera normal", &failed);
            }

            // Depth and shading
            if (!failed)
            {
                glDepthMask (GL_TRUE);
                glDepthFunc (GL_LESS);
                glEnable (GL_DEPTH_TEST);
                glShadeModel (GL_SMOOTH);
                glHint (GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
            }

            // Enable culling
            if (!failed)
            {
                glCullFace (GL_BACK);
                glFrontFace (GL_CCW);
                glEnable (GL_CULL_FACE);
            }
        }

        if (!failed)
            rtn = true;
        else if (window->rcontrol != NULL)
            window->rcontrol->render_type = GL__RENDER_TYPE_NONE;
    }

    return (rtn);
}
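
One caveat about the glGetError checks in both functions above: glGetError only pops a single error per call, and errors accumulate from any earlier GL calls, so a stale error raised by unrelated code can make one of these checks report a failure in the wrong place. A small helper worth calling before anything you want to error-check (just a sketch):

/* Drain any stale errors so the next glGetError check only
   reflects the call immediately before it. */
static void clear_gl_errors (void)
{
    while (glGetError () != GL_NO_ERROR)
        ;
}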
Sorry, I don't see any obvious reason why the second model is drawn using the first object's texture. Not much I can say.
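About the only suggestion I have is to cut it down to the smallest complete program that shows the problem. Something along these lines (a sketch, assuming GLUT is available; make_colour_texture and the quad data are made up for illustration): if this draws a red quad and a green quad on your machine, the driver and the bind/draw pattern are fine, and the bug is somewhere in your surrounding state management.

#include <GL/glut.h>

static GLuint tex[2];

/* Build a solid-colour 2x2 RGBA texture so each draw call is
   visually distinct. */
static GLuint make_colour_texture (unsigned char r, unsigned char g, unsigned char b)
{
    unsigned char data[2 * 2 * 4];
    GLuint id;
    int i;

    for (i = 0; i < 4; i++)
    {
        data[i * 4 + 0] = r;
        data[i * 4 + 1] = g;
        data[i * 4 + 2] = b;
        data[i * 4 + 3] = 255;
    }

    glGenTextures (1, &id);
    glBindTexture (GL_TEXTURE_2D, id);
    /* No mipmaps are uploaded, so the min filter must not be a
       mipmap mode or the texture is incomplete */
    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA, 2, 2, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
    return (id);
}

static void display (void)
{
    static const GLfloat quad1[] = { -0.9f, -0.5f, -0.1f, -0.5f, -0.1f, 0.5f, -0.9f, 0.5f };
    static const GLfloat quad2[] = {  0.1f, -0.5f,  0.9f, -0.5f,  0.9f, 0.5f,  0.1f, 0.5f };
    static const GLfloat uv[]    = {  0.0f,  0.0f,  1.0f,  0.0f,  1.0f, 1.0f,  0.0f, 1.0f };

    glClear (GL_COLOR_BUFFER_BIT);
    glEnable (GL_TEXTURE_2D);
    glEnableClientState (GL_VERTEX_ARRAY);
    glEnableClientState (GL_TEXTURE_COORD_ARRAY);
    glTexCoordPointer (2, GL_FLOAT, 0, uv);

    /* First mesh: bind the first texture, then draw */
    glVertexPointer (2, GL_FLOAT, 0, quad1);
    glBindTexture (GL_TEXTURE_2D, tex[0]);
    glDrawArrays (GL_QUADS, 0, 4);

    /* Second mesh: re-bind before the second draw call */
    glVertexPointer (2, GL_FLOAT, 0, quad2);
    glBindTexture (GL_TEXTURE_2D, tex[1]);
    glDrawArrays (GL_QUADS, 0, 4);

    glutSwapBuffers ();
}

int main (int argc, char **argv)
{
    glutInit (&argc, argv);
    glutInitDisplayMode (GLUT_DOUBLE | GLUT_RGBA);
    glutCreateWindow ("two textures");

    tex[0] = make_colour_texture (255, 0, 0);   /* red */
    tex[1] = make_colour_texture (0, 255, 0);   /* green */

    glutDisplayFunc (display);
    glutMainLoop ();
    return (0);
}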
Use GLIntercept; it will let you see what the textures contain at the press of a button (gDEBugger most likely does the same thing).
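
You can also query which texture object is actually bound right before each draw, without an external tool (a quick sketch; GL_TEXTURE_BINDING_2D has been queryable since GL 1.1):

#include <stdio.h>
#include <GL/gl.h>

/* Print the texture object currently bound to GL_TEXTURE_2D; call
   this just before each glDrawArrays to confirm the bind changed. */
static void print_bound_texture (const char *label)
{
    GLint bound = 0;

    glGetIntegerv (GL_TEXTURE_BINDING_2D, &bound);
    printf ("%s: bound 2D texture = %d\n", label, bound);
}

If both draws report the same name, the bind itself is going wrong; if they report different names, the binds are fine and the texture contents or coordinates are the suspects.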

This topic is closed to new replies.
