GavRobbs

OpenGL Bitmap Question

Here's my problem. I have a class called Texture (the name is a bit misleading, since this program is only a bitmap example). Anyway, I have the BMP loading code as follows:

class Texture
{
public:
    unsigned char * data;
    int width, height, bits;
    GLuint colortype;
    Texture();
    ~Texture();
    Texture(string fp);
};

Texture::Texture(string fp)
{
    BITMAPINFOHEADER info;
    BITMAPFILEHEADER header;
    int imageIdx;
    RGBQUAD tempRGB;

    FILE * datafile = fopen(fp.c_str(), "rb");
    fread(&header, sizeof(BITMAPFILEHEADER), 1, datafile);

    if(header.bfType != 19778)
    {
        fclose(datafile);
        datafile = NULL;
    }
    else
    {
        DWORD size = header.bfSize - header.bfOffBits;
        fread(&info, sizeof(BITMAPINFOHEADER), 1, datafile);
        fseek(datafile, header.bfOffBits, SEEK_SET);

        data = new unsigned char[size];
        fread(data, 1, size, datafile);
        if(!data)
        {
            MessageBox(NULL, "Bitmap loading error.", "Error!", MB_OK);
        }

        for(imageIdx = 0; imageIdx < info.biSizeImage; imageIdx += 3)
        {
            tempRGB.rgbBlue = data[imageIdx];
            data[imageIdx] = data[imageIdx + 2];
            data[imageIdx + 2] = tempRGB.rgbBlue;
        }

        fclose(datafile);
        datafile = NULL;

        width = info.biWidth;
        height = info.biHeight;
        bits = info.biBitCount;
        colortype = GL_RGB;
    }
}

The following are all declared globally:

HDC hDC;
HGLRC hRC;
HWND hWnd;
Texture texture("il.bmp");

My window procedure is defined as follows:

LRESULT CALLBACK WndProc(HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
    switch(msg)
    {
    case WM_CREATE:
        SetupOpenGL(800, 600, hwnd, hDC, hRC);
        break;
    case WM_PAINT:
        DrawTexture();
        SwapBuffers(hDC);
        break;
    case WM_CLOSE:
        wglMakeCurrent(hDC, NULL);
        if(!wglDeleteContext(hRC))
        {
            MessageBox(NULL, "HRC deletion error.", "Error!", MB_OK);
        }
        wglMakeCurrent(NULL, NULL);
        ReleaseDC(hwnd, hDC);
        DestroyWindow(hwnd);
        break;
    case WM_DESTROY:
        PostQuitMessage(0);
        break;
    default:
        return DefWindowProc(hwnd, msg, wParam, lParam);
    }
    return 0;
}

For those of you who love torture, this is the SetupOpenGL function:

void SetupOpenGL(int width, int height, HWND &hwnd, HDC &hdc, HGLRC &hrc)
{
    hdc = GetDC(hwnd);
    if(!hdc)
    {
        MessageBox(NULL, "HDC get error.", "Error!", MB_OK);
    }

    PIXELFORMATDESCRIPTOR pfd;
    memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
    pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
    pfd.nVersion = 1;
    pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
    pfd.iPixelType = PFD_TYPE_RGBA;
    pfd.iLayerType = PFD_MAIN_PLANE;
    pfd.cColorBits = 32;
    pfd.cDepthBits = 32;

    int pf = ChoosePixelFormat(hdc, &pfd);
    if(!SetPixelFormat(hdc, pf, &pfd))
    {
        MessageBox(NULL, "PFD creation error.", "Error!", MB_OK);
    }

    hrc = wglCreateContext(hdc);
    if(!hrc)
    {
        MessageBox(NULL, "HRC creation error.", "Error!", MB_OK);
    }
    wglMakeCurrent(hdc, hrc);

    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-2.0, 2.0, -2.0, 2.0, 1.0, 1000.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glClearColor(0.0, 0.0, 0.0, 0.0);
    SwapBuffers(hdc);
}

Not very neat, sorry... anyway, WinMain is up next:

int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nCmdShow)
{
    char * cn = "My Window Class";
    MSG Msg;
    WNDCLASSEX wc;
    memset(&wc, 0, sizeof(WNDCLASSEX));
    wc.cbSize = sizeof(WNDCLASSEX);
    wc.style = CS_OWNDC | CS_HREDRAW | CS_VREDRAW;
    wc.cbClsExtra = 0;
    wc.cbWndExtra = 0;
    wc.hbrBackground = (HBRUSH)(COLOR_WINDOW + 1);
    wc.hCursor = LoadCursor(NULL, IDC_ARROW);
    wc.hIcon = LoadIcon(NULL, IDI_APPLICATION);
    wc.hIconSm = LoadIcon(NULL, IDI_APPLICATION);
    wc.hInstance = hInstance;
    wc.lpfnWndProc = WndProc;
    wc.lpszClassName = cn;
    wc.lpszMenuName = NULL;

    RegisterClassEx(&wc);

    hWnd = CreateWindowEx(WS_EX_CLIENTEDGE, cn, "My Window", WS_OVERLAPPEDWINDOW,
                          0, 0, 800, 600, NULL, NULL, hInstance, NULL);
    ShowWindow(hWnd, nCmdShow);
    UpdateWindow(hWnd);

    while(GetMessage(&Msg, NULL, 0, 0) > 0)
    {
        TranslateMessage(&Msg);
        DispatchMessage(&Msg);
    }
    return Msg.wParam;
}

Now when I compile I get no errors. I run the program and I get a black window but no texture. I'm sure the texture is in the project folder, I'm sure it's a bitmap, and I'm sure I spelt the darn thing correctly! Nothing is showing. Can someone please help me? Thanks in advance.
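
EDIT: a stripped-down console check that mirrors the same header reads would look something like the sketch below (nothing project-specific; it only needs <windows.h> and <cstdio>). If the values it prints look wrong, the problem is in the file or the loader rather than on the OpenGL side.

#include <windows.h>   // BITMAPFILEHEADER, BITMAPINFOHEADER, BI_RGB
#include <cstdio>

int main()
{
    BITMAPFILEHEADER header;
    BITMAPINFOHEADER info;

    FILE * f = fopen("il.bmp", "rb");
    if(!f)
    {
        printf("could not open il.bmp - check the working directory\n");
        return 1;
    }

    // Same two reads the Texture constructor performs.
    fread(&header, sizeof(BITMAPFILEHEADER), 1, f);
    fread(&info, sizeof(BITMAPINFOHEADER), 1, f);
    fclose(f);

    // 19778 is 'BM', the signature the constructor checks for.
    printf("bfType ok : %s\n", header.bfType == 19778 ? "yes" : "no");
    printf("width     : %ld\n", info.biWidth);
    printf("height    : %ld\n", info.biHeight);
    printf("bit count : %d\n", (int)info.biBitCount);
    printf("compressed: %s\n", info.biCompression == BI_RGB ? "no" : "yes");

    return 0;
}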

I forgot one thing: here is the DrawTexture() function I use.

void DrawTexture()
{
    glRasterPos2i(150, 200);
    glDrawPixels(texture.width, texture.height, texture.colortype, GL_UNSIGNED_BYTE, texture.data);
}

Sorry if all this seems like a code dump, but it was probably the best way to explain it. I've tried changing glClearColor from black (nothing changes on screen), and I've tried changing the HBRUSH color (that works). Dunno what to do now.
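
If it helps narrow things down, DrawTexture could be instrumented with the standard GL queries below (a sketch only; glGetError and GL_CURRENT_RASTER_POSITION_VALID are plain OpenGL 1.1, so nothing extra needs to be linked):

void DrawTexture()
{
    glRasterPos2i(150, 200);

    // If the raster position lands outside the volume set up by glOrtho,
    // it is flagged invalid and glDrawPixels silently draws nothing.
    GLboolean rasterValid = GL_FALSE;
    glGetBooleanv(GL_CURRENT_RASTER_POSITION_VALID, &rasterValid);
    if(!rasterValid)
    {
        MessageBox(NULL, "Raster position is invalid.", "Debug", MB_OK);
    }

    glDrawPixels(texture.width, texture.height,
                 texture.colortype, GL_UNSIGNED_BYTE, texture.data);

    // Report anything the GL rejected in the calls above.
    if(glGetError() != GL_NO_ERROR)
    {
        MessageBox(NULL, "glGetError reported an error.", "Debug", MB_OK);
    }
}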

Hi, if you edit your posts, then copy and paste your code again between [source] ... [/source] tags, it will retain the formatting and tabs, and will be colour-coded for C++. This makes it a lot easier to read, and a lot easier to diagnose your problem.

EDIT: I had a quick look over your code; have you tried forcing your window to repaint itself? Moved it, resized it, minimized or maximized it? Sounds silly, but it could be that no WM_PAINT message is sent, so your DrawTexture function is never called. Also, try commenting out that call to glRasterPos2i and see if that has any effect. Finally, have you stepped through with the debugger? Put breakpoints on the important steps in your bitmap loading code and make sure it's actually doing what you think it's doing...
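
For example, a crude way to force a steady stream of repaints while you're testing is to invalidate the window every time round the message loop; a sketch against the WinMain you posted (InvalidateRect is the standard Win32 call for this):

while(GetMessage(&Msg, NULL, 0, 0) > 0)
{
    TranslateMessage(&Msg);
    DispatchMessage(&Msg);

    // Mark the whole client area dirty so another WM_PAINT gets queued.
    // FALSE = don't ask Windows to erase the background first.
    InvalidateRect(hWnd, NULL, FALSE);
}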
