• ### Announcements

#### Archived

This topic is now archived and is closed to further replies.

## 2 posts in this topic

Ok, I have tried time and time again. I have written a basic load bitmap function and load targa function, but they both don't work at all. I'll post the code and see if you can help me. GLImage class definition: (look at the IMAGE struct I made since that is important too)
#ifndef GLIMAGE
#define GLIMAGE

//  Define the universal bitmap id ("BM" as a little-endian 16-bit value)
#define BITMAP_ID 0x4D42

#include <stdio.h>	//  Holds the file manipulation routines (FILE, fopen, fread, fclose)

//  Structure that holds pertinent information about image formats
typedef struct
{
short int		width;		//  Width of the image in pixels
short int		height;		//  Height of the image in pixels
unsigned char	*data;		//  Raw pixel data of the image (caller frees with delete[])
} IMAGE;

//  The definition of the GLImage class: loads Windows BMP and Targa (TGA)
//  images from disk into IMAGE structures for use as OpenGL textures.
class GLImage
{
private:
FILE				*file;				//  The file pointer

//  Bitmap variables
IMAGE				*bmpImage;			//  Structure that holds pertinent information about the Windows bitmap format

//  Targa variables
IMAGE				*tgaImage;			//  Structure that holds pertinent information about the Targa image format
unsigned char		imageTypeCode;		//  The image type (i.e. 1, 2, or 3)
unsigned char		bitCount;			//  The bit count of the Targa image
unsigned char		ucharBad;			//  Garbage unsigned char data
short int			sintBad;			//  Garbage short int data
long				imageSize;			//  Size of the TGA image
int					colorMode;			//  Holds color mode - 4 for RGBA  - 3 for RGB

public:
//  NOTE(review): the method declarations were stripped by the forum
//  formatting; these signatures are reconstructed from the definitions
//  posted below (both take a filename and return an IMAGE*, NULL on
//  failure) -- confirm against the original source.

//  Loads a Windows bitmap file; returns the image or NULL on failure
IMAGE* LoadBMP(const char* filename);

//  Loads a Targa image format file; returns the image or NULL on failure
IMAGE* LoadTGA(const char* filename);
};

#endif

Ok that was the class, here are the function definitions:
//  Include neccessary header files
#include "stdafx.h"		//  Header file for Windows
#include "GLImage.h"	//  Header file for the GLImage class

//  Loads a Windows bitmap file
//  NOTE(review): the function signature is missing here -- the forum
//  formatting apparently stripped it. From the header and the usage at the
//  bottom of the post it is presumably
//      IMAGE* GLImage::LoadBMP(const char* filename)
//  -- confirm against the original source. The fread/fseek lines appear
//  to have been stripped as well (see notes below).
{
//  Open the filename in "read binary" mode
file = fopen(filename, "rb");

//  If the file couldn't be opened return NULL
if(file == NULL)
return NULL;

//  Verify that this is a bitmap by checking for the universal bitmap ID
//  NOTE(review): the read of the bitmap file header and its comparison
//  against BITMAP_ID are missing from the post -- as written this is a
//  bare block that always executes and unconditionally returns NULL.
{
//  This is not a bitmap so close the file and return NULL
fclose(file);

return NULL;
}

//  Move the file pointer to the beginning of the bitmap data
//  NOTE(review): the fseek() call (and the read of the BITMAPINFOHEADER
//  that `bitmapInfoHeader` below must come from) is missing from the post.

//  Allocate new space for the bmpImage
bmpImage = new IMAGE;

//  NOTE(review): in standard C++, `new` throws std::bad_alloc instead of
//  returning NULL, so this branch is dead; `delete` on a null pointer is
//  also a no-op. Harmless, but the check gives false confidence.
if(bmpImage == NULL)
{
//  Memory allocation failed so free the bmpImage, close the file, and return NULL
delete bmpImage;
fclose(file);

return NULL;
}

//  Allocate enough memory for the bitmap image data
//  NOTE(review): the `new unsigned char[...]` line is missing from the
//  post, so the NULL check below reads an uninitialized pointer --
//  undefined behavior, and a likely cause of the reported crashes.

//  Verify memory allocation
if(bmpImage->data == NULL)
{
//  Memory allocation failed so free the bitmapImage data, free the bmpImage, close the file, and return NULL
delete [] bmpImage->data;
delete bmpImage;
fclose(file);

return NULL;
}

//  Read in the bitmap image data
//  NOTE(review): the fread() of the pixel data is missing from the post.
//  Also note the check below cannot detect a failed fread -- fread does
//  not set the destination pointer to NULL; check its return value instead.

//  Make sure the bitmap image data was read
if(bmpImage->data == NULL)
{
//  Bitmap image was not read so free the bitmapImage data, free the bmpImage, close the file, and return NULL
delete [] bmpImage->data;
delete bmpImage;
fclose(file);

return NULL;
}

//  NOTE(review): NULL used to initialize a char -- works (it's 0) but
//  plain `unsigned char tempRGB = 0;` would state the intent.
unsigned char tempRGB = NULL;	//  Temporary BGR->RGB swap variable

//  Swap the R and B values to get RGB since the bitmap color format is in BGR
//  NOTE(review): BMP scanlines are padded to 4-byte boundaries, so for
//  widths whose 3*width is not a multiple of 4 this fixed +3 stride
//  drifts off the pixel grid -- a plausible explanation for the poster's
//  "error if the width is greater than 128" (128*3 happens to be 4-aligned).
for(int imageIdx = 0; imageIdx < bitmapInfoHeader.biSizeImage; imageIdx += 3)
{
tempRGB = bmpImage->data[imageIdx];
bmpImage->data[imageIdx] = bmpImage->data[imageIdx + 2];
bmpImage->data[imageIdx + 2] = tempRGB;
}

fclose(file);			//  Close the file

return bmpImage;		//  Return the bitmap image since it was a success
}

//  Loads a Targa image format file
//  NOTE(review): the function signature is missing here -- presumably
//      IMAGE* GLImage::LoadTGA(const char* filename)
//  (see the LoadBMP definition above) -- confirm against the original
//  source. All fread() lines appear to have been stripped by the forum
//  formatting; see notes below.
{
//  Open the filename in "read binary" mode
file = fopen(filename, "rb");

//  If the file couldn't be opened return NULL
if(file == NULL)
return NULL;

//  Read first two bytes of data we don't need
//  NOTE(review): the fread() into ucharBad is missing from the post.

//  Read in the image type
//  NOTE(review): the fread() into imageTypeCode is missing from the post,
//  so the check below tests an uninitialized member.

//  Currently supported Targa file formats are 2-Uncompressed RGB and 3-Uncompressed black and white
if(imageTypeCode != 2 && imageTypeCode != 3)
{
//  If it is not a supported type of Targa image close the file and return NULL
fclose(file);

return NULL;
}

//  Read 13 bytes of data we don't need
//  NOTE(review): the reads are missing from the post. Also, the TGA
//  header's width and height live in this region (bytes 12-15), yet
//  tgaImage->width/height are used below without ever being assigned --
//  verify the original code read them here.

//  Allocate new space for the tgaImage
tgaImage = new IMAGE;

//  NOTE(review): `new` throws std::bad_alloc rather than returning NULL
//  in standard C++, so this branch is dead (and delete-on-null is a no-op).
if(tgaImage == NULL)
{
//  Memory allocation failed so free the tgaImage, close the file, and return NULL
delete tgaImage;
fclose(file);

return NULL;
}

//  Read one byte of data we don't need
//  NOTE(review): the fread() into bitCount (byte 16 of the TGA header,
//  bits per pixel) is missing, so colorMode below divides an
//  uninitialized value.

//  colorMode -> 3 = BGR, 4 = BGRA
colorMode = bitCount / 8;
//  NOTE(review): width and height are used here but never read -- see note above.
imageSize = tgaImage->width * tgaImage->height * colorMode;

//  Allocate memory for the image data
tgaImage->data = new unsigned char[sizeof(unsigned char) * imageSize];

//  Verify memory allocation
if(tgaImage->data == NULL)
{
//  Memory allocation failed so free the targaImage data, free the tgaImage, close the file, and return NULL
delete [] tgaImage->data;
delete tgaImage;
fclose(file);

return NULL;
}

//  Read in the image data
//  NOTE(review): the fread() of the pixel data is missing from the post.
//  The NULL check below cannot detect a failed fread -- check fread's
//  return value (items read) instead.

//  Make sure the targa image data was read
if(tgaImage->data == NULL)
{
//  Targa image was not read so free the targaImage data, free the tgaImage, close the file, and return NULL
delete [] tgaImage->data;
delete tgaImage;
fclose(file);

return NULL;
}

unsigned char colorSwap;	//  Swap variable used to exchange the R and B values

//  Change BGR to RGB so OpenGL can read the image data
for(int imageIdx = 0; imageIdx < imageSize; imageIdx += colorMode)
{
colorSwap = tgaImage->data[imageIdx];
tgaImage->data[imageIdx] = tgaImage->data[imageIdx + 2];
tgaImage->data[imageIdx + 2] = colorSwap;
}

fclose(file);		//  Close the file

//  NOTE(review, BUG): there is no `return tgaImage;` here -- falling off
//  the end of a value-returning function is undefined behavior, and the
//  caller receives garbage. This alone would explain the loader "not
//  working at all" for TGA files.
}

I think the way to use it in the code is self-explanatory if you have done this type of thing before. It goes a little like this: texture; IMAGE* image; GLImage i; // Load The Bitmap, Check For Errors, If Bitmap's Not Found Quit if (TextureImage[0]=LoadBMP("Data/Crate.bmp")) { glGenTextures(1, &texture); glBindTexture(GL_TEXTURE_2D, texture); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, image->width, image->height, 0, GL_RGB, GL_UNSIGNED_BYTE, image->data); } ..... Clear the memory ..... Etc. Ok, well if you have survived it this far ;-) I really appreciate that you took time out of your busy schedule to help me. If I was going to guess it is a pointer problem somewhere, but I can't find it. I have also tried using the gluBuild2DMipmaps() in place of the glTexImage2D. I also get an error if the width is greater than 128. I tested it on three different comps with different video cards etc. and it still gave an error. SO, if you can help me in the smallest way my day will be a lot brighter. THANKS!
0

##### Share on other sites
That's a lot of weird-looking code...
All I can say is, the bitmap/targa file width and height both need to be a power of two, ie 16, 32, 64, etc. Also... I wrote my own bitmap and targa loading routines and I found that some types swapped the blue and red colours round, I think it was blue and red anyway... So don't be discouraged if you see weird colours.
0

##### Share on other sites
I had a problem with loading bitmaps as well. Per the advice of someone else here, I tried saving my bitmaps with PaintShop Pro 7 and everything worked perfectly after that. You can download it free (demo) from their site. Just do a google search for PaintShop Pro and you'll find it.

- Mike
0

• ### Similar Content

• I googled around but are unable to find source code or details of implementation.
What keywords should I search for this topic?
Things I would like to know:
A. How to ensure that partially covered pixels are rasterized?
Apparently by expanding each triangle by 1 pixel or so, rasterization problem is almost solved.
But it will result in an unindexable triangle list without tons of overlaps. Will it incur a large performance penalty?
B. How to ensure proper synchronization in GLSL?
GLSL seems to only allow int32 atomics on image.
C. Is there some simple ways to estimate coverage on-the-fly?
In case I am to draw 2D shapes onto an existing target:
1. A multi-pass whatever-buffer seems overkill.
2. Multisampling could cost a lot memory though all I need is better coverage.
Besides, I have to blit twice, if draw target is not multisampled.

• By mapra99
Hello

I am working on a recent project and I have been learning how to code in C# using OpenGL libraries for some graphics. I have achieved some quite interesting things using TAO Framework writing in Console Applications, creating a GLUT Window. But my problem now is that I need to incorporate the Graphics in a Windows Form so I can relate the objects that I render with some .NET Controls.

To deal with this problem, I have seen in some forums that it's better to use OpenTK instead of TAO Framework, so I can use the glControl that OpenTK libraries offer. However, I haven't found complete articles, tutorials or source codes that help using the glControl or that may introduce me to the OpenTK functions. Would somebody please share in this forum some links or files where I can find good documentation about this topic? Or may I use another library different from OpenTK?

Thanks!

• Hello, I have been working on SH Irradiance map rendering, and I have been using a GLSL pixel shader to render SH irradiance to 2D irradiance maps for my static objects. I already have it working with 9 3D textures so far for the first 9 SH functions.
In my GLSL shader, I have to send in 9 SH Coefficient 3D Texures that use RGBA8 as a pixel format. RGB being used for the coefficients for red, green, and blue, and the A for checking if the voxel is in use (for the 3D texture solidification shader to prevent bleeding).
My problem is, I want to knock this number of textures down to something like 4 or 5. Getting even lower would be a godsend. This is because I eventually plan on adding more SH Coefficient 3D Textures for other parts of the game map (such as inside rooms, as opposed to the outside), to circumvent irradiance probe bleeding between rooms separated by walls. I don't want to reach the 32 texture limit too soon. Also, I figure that it would be a LOT faster.
Is there a way I could, say, store 2 sets of SH Coefficients for 2 SH functions inside a texture with RGBA16 pixels? If so, how would I extract them from inside GLSL? Let me know if you have any suggestions ^^.
• By KarimIO
EDIT: I thought this was restricted to Attribute-Created GL contexts, but it isn't, so I rewrote the post.
Hey guys, whenever I call SwapBuffers(hDC), I get a crash, and I get a "Too many posts were made to a semaphore." from Windows as I call SwapBuffers. What could be the cause of this?
Update: No crash occurs if I don't draw, just clear and swap.
static PIXELFORMATDESCRIPTOR pfd = // pfd Tells Windows How We Want Things To Be { sizeof(PIXELFORMATDESCRIPTOR), // Size Of This Pixel Format Descriptor 1, // Version Number PFD_DRAW_TO_WINDOW | // Format Must Support Window PFD_SUPPORT_OPENGL | // Format Must Support OpenGL PFD_DOUBLEBUFFER, // Must Support Double Buffering PFD_TYPE_RGBA, // Request An RGBA Format 32, // Select Our Color Depth 0, 0, 0, 0, 0, 0, // Color Bits Ignored 0, // No Alpha Buffer 0, // Shift Bit Ignored 0, // No Accumulation Buffer 0, 0, 0, 0, // Accumulation Bits Ignored 24, // 24Bit Z-Buffer (Depth Buffer) 0, // No Stencil Buffer 0, // No Auxiliary Buffer PFD_MAIN_PLANE, // Main Drawing Layer 0, // Reserved 0, 0, 0 // Layer Masks Ignored }; if (!(hDC = GetDC(windowHandle))) return false; unsigned int PixelFormat; if (!(PixelFormat = ChoosePixelFormat(hDC, &pfd))) return false; if (!SetPixelFormat(hDC, PixelFormat, &pfd)) return false; hRC = wglCreateContext(hDC); if (!hRC) { std::cout << "wglCreateContext Failed!\n"; return false; } if (wglMakeCurrent(hDC, hRC) == NULL) { std::cout << "Make Context Current Second Failed!\n"; return false; } ... // OGL Buffer Initialization glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); glBindVertexArray(vao); glUseProgram(myprogram); glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_SHORT, (void *)indexStart); SwapBuffers(GetDC(window_handle));
• By Tchom
Hey devs!

I've been working on a OpenGL ES 2.0 android engine and I have begun implementing some simple (point) lighting. I had something fairly simple working, so I tried to get fancy and added color-tinting light. And it works great... with only one or two lights. Any more than that, the application drops about 15 frames per light added (my ideal is at least 4 or 5). I know implementing lighting is expensive, I just didn't think it was that expensive. I'm fairly new to the world of OpenGL and GLSL, so there is a good chance I've written some crappy shader code. If anyone had any feedback or tips on how I can optimize this code, please let me know.