Jump to content

  • Log In with Google      Sign In   
  • Create Account

OpenGL texture renders solid white


Old topic!
Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.

  • You cannot reply to this topic
4 replies to this topic

#1 dotBebo   Members   -  Reputation: 108

Like
0Likes
Like

Posted 19 February 2013 - 10:51 PM

Hello! I don't usually ask for help, but I'm at wit's end trying to solve this puzzle. The problem is that I am trying to test a hand-written PNG loader by rendering the image data onscreen, but no matter what I try I can't get it to show up as anything more than a solid white quad (white is the image's background color, though. Without a texture the quad shows up black). I've checked around the internet and tried a bunch of solutions but none of them have worked so far. However, just for a sanity check, I'll go through it all one more time:

 

1. Make sure that your image isn't loaded before openGL is initialized:

//WNDPROC case WM_CREATE	
//Creates the GL context inside WM_CREATE so everything GL-dependent
//(GLEW, image/texture loading) runs only after a context is current.
       switch (uMsg){
	case WM_CREATE:
	hdc = GetDC(hwnd);
	PIXELFORMATDESCRIPTOR pfd;
    int pixelFormat;
	memset(&pfd,0,sizeof(PIXELFORMATDESCRIPTOR));
	pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
	pfd.nVersion = 1;
	pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
	pfd.iPixelType = PFD_TYPE_RGBA;
	pfd.cColorBits = 32;
	pfd.cDepthBits = 16;
	pfd.iLayerType = PFD_MAIN_PLANE;
	pixelFormat = ChoosePixelFormat(hdc,&pfd); //NOTE(review): return value unchecked — 0 means no matching format was found
	SetPixelFormat(hdc,pixelFormat,&pfd); //NOTE(review): BOOL result unchecked; failure here leaves no usable context
	hrc = wglCreateContext(hdc);
	wglMakeCurrent(hdc,hrc);
	glewInit();                            //Opengl Init somewhere right here
	wglMakeCurrent(hdc,hrc); //redundant: the same context was already made current two lines up
	Window::s_hWnd = hwnd;
	g.Initialize();                       //Image Load Here — correctly placed AFTER the context exists
	return 0;

2. Make sure that you have mipmaps disabled/ filtering at linear or nearest:

 

//Image. Load(), not called with constructor
//Creates a GL texture object and uploads the decoded PNG pixels as a
//single non-mipmapped RGBA level.
       //Load Image...
	glEnable( GL_TEXTURE_2D ); //fixed-function toggle; has no effect on shader-based sampling
	glActiveTexture(GL_TEXTURE0);
	glGenTextures(1,&TexID); //create TexBuffer
	glBindTexture(GL_TEXTURE_2D,TexID); //Finalize and make it current
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0); //level 0 only: texture is complete without glGenerateMipmap
	glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST); //non-mipmapped min filter, consistent with MAX_LEVEL 0
	glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA8,Width,Height,0,GL_RGBA,GL_UNSIGNED_BYTE,getImageData());
        //getImageData() returns &ImageData[0];
        //NOTE(review): assumes tightly packed RGBA rows — for non-4-byte-aligned rows set GL_UNPACK_ALIGNMENT
	glDisable( GL_TEXTURE_2D ); //I heard this solved one person's problem, no change either way.	
}

 3.Make sure your shaders are set up properly

#version 130 //fragment shader
//Samples texture unit 0 at the interpolated texCoord and writes it as
//the fragment color.
//NOTE(review): the paired vertex shader declares #version 150 — mixing
//GLSL versions within one program is implementation-dependent; confirm
//both compile on the target driver, or unify the two #version lines.
uniform sampler2D texture0;
uniform vec3 Material; //future uniform
uniform vec3 Color;  //future uniform 
in vec2 texCoord; //matches with vertex shader
out vec4 outColor;
void main(void) {
	outColor = texture(texture0, texCoord.st);
}

The attribute for tex coords is bound at slot 2, and at runtime the uniform slot for "texture0" is 2 according to the debugger

 

4. Make sure your draw calls send tex coordinates

(Like I said before I tried this with immediate mode and it still didn't work)

//rendering loop
//Per model: bind its texture, point the sampler at unit 0, set the
//attribute pointers into the interleaved VBO, and draw indexed.
glEnable( GL_TEXTURE_2D ); 
glActiveTexture(GL_TEXTURE0);
//for loop{
	glBindTexture(GL_TEXTURE_2D,Store.Models.Block[i].Data->TexID);
	glUniform1i(Cam.Lens.TEXTURE_0,0); //sampler "texture0" reads texture unit 0

//matrix transforms..

      glBindBuffer(GL_ARRAY_BUFFER,Store.Models.Block[i].VBO);
//stored as one big buffer, 3 vertex floats, 3 normal floats,and 2 texcoord floats per vertex
      glVertexAttribPointer(0,3,GL_FLOAT,GL_FALSE,8*sizeof(float),0); //Veticies
      glVertexAttribPointer(1,3,GL_FLOAT,GL_FALSE,8*sizeof(float),(GLvoid*)(3*sizeof(float))); //Normals
      glVertexAttribPointer(2,2,GL_FLOAT,GL_FALSE,8*sizeof(float),(GLvoid*)(6*sizeof(float))); //TexCoords
      //NOTE(review): glEnableVertexAttribArray(0/1/2) is not shown — confirm the arrays are enabled somewhere, or attribute 2 never reaches the shader

	glDrawElements(GL_TRIANGLE_STRIP,Store.Models.Block[i].Data->ILength,
		GL_UNSIGNED_SHORT,&Store.Models.Block[i].Data->Index[0]);
	//NOTE(review): client-side index pointer while vertices come from a VBO — legal with no ELEMENT_ARRAY_BUFFER bound, but fragile
	}
glDisable( GL_TEXTURE_2D ); 
}//end 

5. What are you trying to draw?

//Data for the model
		// Interleaved layout: 3 position floats, 3 normal floats, 2 UV
		// floats per vertex (stride = 8 floats), matching the three
		// glVertexAttribPointer calls in the render loop.
		float VertsNormUV[32] = {1,0,0,  0,0,1,  0,0,
		                         1,1,0,  0,0,1,  0,1,
		                         0,1,0,  0,0,1,  1,1,
		                         0,0,0,  0,0,1,  1,0,};
		// FIX: glDrawElements is called with GL_UNSIGNED_SHORT, so the
		// index array must be unsigned to match (plain short only happened
		// to work because every value here is small and non-negative).
		// NOTE(review): "indecies" is a typo for "indices", but the name is
		// referenced by the draw call, so it is kept for compatibility.
		unsigned short indecies[6] = {1,2,3,1,0,3};

6 Where are you storing it?

The model data, model instances, and images are all stored in a container class owned by a global Graphics class (g), which initializes its data at the end of the WndProc Create message (in #1).

 

On top of that, after I load the image data into the texture object, I can get the same data back out with glGetTexImage(). So the texture is loading properly, and the vertices are rendering correctly, but it still isn't textured right. The image is a small 16x16 sprite, which should work because it's a power of two.

 

Anyways, I'm struggling to figure this one out, so any help would be appreciated!



Sponsor:

#2 LorenzoGatti   Crossbones+   -  Reputation: 2705

Like
1Likes
Like

Posted 20 February 2013 - 02:58 AM

On top of that, after I load the image data into the texture object, I can get the same data back out with glGetTexImage().So the texture is loading properly, and the vertecies are rendering correctly, but it still isn't textured right.

Success with glGetTexImage() doesn't prove that you are actually using that correctly loaded texture and using it correctly.

For instance, where does texCoord in the fragment shader come from? Where is the vertex shader, and where are you actually loading and binding the shaders?


Edited by LorenzoGatti, 20 February 2013 - 02:58 AM.

Produci, consuma, crepa

#3 Waterlimon   Crossbones+   -  Reputation: 2565

Like
1Likes
Like

Posted 20 February 2013 - 03:43 AM

Are you binding the fragment color output to be used as the out color? It probably works without, but just to be sure.

It's something like glBindFragDataLocation

o3o


#4 dotBebo   Members   -  Reputation: 108

Like
0Likes
Like

Posted 20 February 2013 - 03:12 PM

Are you binding the fragment color output to be used as the out color? It probably works without, but just to be sure.

 

If I set outColor to the Color uniform, the rectangle renders in that color ( I used red), so I'd assume that OGL automatically binds the first fragment out attribute to fragment color.

 

 

 where does texCoord in the fragment shader come from? Where is the vertex shader, and where are you actually loading and binding the shaders?

The vertex shader passes texCoord:

#version 150  //vertex shader
//Transforms a_Vertex by modelview then projection, and forwards
//a_TexCoord unchanged to the fragment shader's matching `in texCoord`.
uniform mat4 projection_matrix;
uniform mat4 modelview_matrix;
in vec3 a_Vertex;   //intended for attribute slot 0 — see glBindAttribLocation in Load()
in vec2 a_TexCoord; //intended for attribute slot 2
in vec3 a_Normal;   //intended for slot 1; declared but not used yet
out vec2 texCoord;
void main(void)
{
texCoord = a_TexCoord;  //right here
vec4 pos = modelview_matrix * vec4(a_Vertex, 1.0);
gl_Position = projection_matrix *pos;}

Both shaders are loaded from a handler class at the beginning of g.Initialize()(#1);

This is the loading code:

bool Load(string filename = "Data/Shaders/Basic"){
	// Loads, compiles, and links the "<filename>.vrt" / "<filename>.frg"
	// shader pair, caches the uniform locations, and pins the vertex
	// attribute slots. Returns true on success; pops a message box and
	// returns false on any missing file, compile error, or link error.
	GLuint VertexHandle, FragmentHandle;
	Program = glCreateProgram();           //Program is a member variable
	VertexHandle   = glCreateShader(GL_VERTEX_SHADER);
	FragmentHandle = glCreateShader(GL_FRAGMENT_SHADER);

	string VertCode, FragCode;

	// Read each shader source verbatim. istreambuf_iterator avoids the
	// original get() loop, which appended the EOF sentinel and needed a
	// trailing erase(). A fresh stream per file also sidesteps the
	// pre-C++11 quirk where re-open()ing a stream keeps its eof/fail bits.
	string FN = BASEFILE + filename + ".vrt";
	{
		ifstream file(FN.c_str(), ifstream::in);
		if(file.good() != true){MessageBoxA ( NULL, FN.c_str(), "Graphics Error", MB_OK );}
		VertCode.assign(istreambuf_iterator<char>(file), istreambuf_iterator<char>());
	}
	FN = BASEFILE + filename + ".frg";
	{
		ifstream file(FN.c_str(), ifstream::in);
		if(file.good() != true){MessageBoxA ( NULL, FN.c_str(), "Graphics Error", MB_OK );}
		FragCode.assign(istreambuf_iterator<char>(file), istreambuf_iterator<char>());
	}

	const GLchar* vrtmp = static_cast<const GLchar*>(VertCode.c_str());
	const GLchar* frtmp = static_cast<const GLchar*>(FragCode.c_str());
	glShaderSource(VertexHandle, 1, &vrtmp, NULL);
	glShaderSource(FragmentHandle, 1, &frtmp, NULL);

	glCompileShader(VertexHandle);
	glCompileShader(FragmentHandle);

	GLint fr_CompileStatus, vr_CompileStatus, LinkStatus;
	glGetShaderiv(VertexHandle, GL_COMPILE_STATUS, &vr_CompileStatus);
	glGetShaderiv(FragmentHandle, GL_COMPILE_STATUS, &fr_CompileStatus);

	if(vr_CompileStatus == 0 || fr_CompileStatus == 0){
		// FIX: report the log of whichever shader actually failed — the
		// original always dumped the FRAGMENT log, hiding vertex errors.
		char err[849];
		glGetShaderInfoLog(vr_CompileStatus == 0 ? VertexHandle : FragmentHandle,
		                   849, NULL, &err[0]);
		MessageBoxA ( NULL, err, "Shader Compiler Error", MB_OK );
		glDeleteShader(VertexHandle);   // don't leak the shader objects
		glDeleteShader(FragmentHandle);
		return false;/*COMPILER ERROR*/
	}

	glAttachShader(Program, VertexHandle);
	glAttachShader(Program, FragmentHandle);

	// FIX: glBindAttribLocation only takes effect at the NEXT link, so
	// these calls must come BEFORE glLinkProgram. The original bound them
	// after linking, leaving a_TexCoord on a driver-chosen slot — the
	// likely cause of the untextured (solid white) quad.
	glBindAttribLocation(Program, 0, "a_Vertex");
	glBindAttribLocation(Program, 1, "a_Normal");
	glBindAttribLocation(Program, 2, "a_TexCoord");

	LinkStatus = 0;
	glLinkProgram(Program); //Program is a member variable
	glGetProgramiv(Program, GL_LINK_STATUS, &LinkStatus);

	// Once linked (or failed), the program holds its own reference to the
	// attached shaders; the standalone shader objects are no longer needed.
	glDeleteShader(VertexHandle);
	glDeleteShader(FragmentHandle);

	if(LinkStatus == 0){
		char err[100];
		glGetProgramInfoLog(Program, 100, NULL, &err[0]);
		MessageBoxA ( NULL, err, "Shader Linker Error", MB_OK );return false; /*LINKER ERROR*/
	}

	// Cache uniform locations (member variables).
	MODELVIEW_MATRIX  = glGetUniformLocation(Program,"modelview_matrix");
	PROJECTION_MATRIX = glGetUniformLocation(Program,"projection_matrix");
	TEXTURE_0         = glGetUniformLocation(Program,"texture0");
	COLOR             = glGetUniformLocation(Program,"Color");
	MATERIAL          = glGetUniformLocation(Program,"Material");

	return true;
}

It should fail gracefully if the shaders don't compile. Also, glUseProgram() is called every frame, in a pre-paint function.



#5 dotBebo   Members   -  Reputation: 108

Like
0Likes
Like

Posted 20 February 2013 - 03:21 PM

Aha! If I set my out texCoord in my vertex shader to a_Vertex.st (it's a 1 by 1 square), it renders! (Looks like there's still a couple of visual bugs in my image loader, though.)

I wonder if it has something to do with my trying to send interleaved model data. Either way I know where to look for now. Thank you guys for the help!






Old topic!
Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.



PARTNERS