OpenGL triangle example

Started by
8 comments, last by Dark Engineer 11 years, 3 months ago
I have a problem with this code: it doesn't show anything in the window. The vertex and fragment shaders load successfully, but glGetError reports that something is wrong. Could someone please help me?

#include "loadshader.h"
int main()
{
	glfwInit();
	glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
	glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 3);
	glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
	glfwOpenWindowHint(GLFW_WINDOW_NO_RESIZE, GL_TRUE);
	glfwOpenWindow(800, 600, 0, 0, 0, 0, 0, 0, GLFW_WINDOW);
	glfwSetWindowTitle("Crion World Alpha");
	glewExperimental = GL_TRUE;
	glewInit();
	GLuint vertexBuffer;
	glGenBuffers( 1, &vertexBuffer );
	GLuint vao;
	glGenVertexArrays(1, & vao);
	glBindVertexArray( vao );
	GLuint vbo;
	glGenBuffers( 1, &vbo); //generate 1 buffer
	float vertices[] = {
		0.0f,	0.5f,
		0.5f,	-0.5f,
		-0.5f,	-0.5f
	};
	glBindBuffer( GL_ARRAY_BUFFER, vbo );
	glBufferData( GL_ARRAY_BUFFER, sizeof( vertices ), vertices, GL_STATIC_DRAW );
	// vertex shader loading
	const string vShaderString = loadShaderFile("SimpleVertexShader.vertexshader").c_str();
	const GLchar * tempVShader = vShaderString.c_str();
	GLuint vertexShader = glCreateShader( GL_VERTEX_SHADER );
	glShaderSource(vertexShader, 1, & tempVShader, NULL);
	glCompileShader( vertexShader ); 
	std::fstream logs;
	logs.open( "logs.txt", ios::in | ios::out | ios::trunc );
	char  vsbuffer[512];
	glGetShaderInfoLog( vertexShader, 512, NULL, vsbuffer );
	logs << vsbuffer;
	//fragment shader loading
	const string fShaderString = loadShaderFile("SimpleFragmentShader.fragmentshader").c_str();
	const GLchar * tempFShader = fShaderString.c_str();
	GLuint fragmentShader = glCreateShader( GL_FRAGMENT_SHADER );
	glShaderSource(fragmentShader, 1, & tempFShader, NULL );
	glCompileShader(fragmentShader);
	char  fsbuffer[512];
	glGetShaderInfoLog( fragmentShader, 512, NULL, fsbuffer );
	logs << fsbuffer;
	if(glGetError() != 0)
	{
		logs << "sth errors";
		cout << "sth wrong" << endl;
	}
	logs.close();
	GLuint shaderProgram = glCreateProgram();
	glAttachShader( shaderProgram, vertexShader );
	glAttachShader( shaderProgram, fragmentShader);
	glBindFragDataLocation( shaderProgram, 0, "outColor" );
	glLinkProgram( shaderProgram );
	glUseProgram( shaderProgram );
	GLint posAttrib = glGetAttribLocation( shaderProgram, "position" );
	glEnableVertexAttribArray( posAttrib );
	glVertexAttribPointer( posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0 );
	while( glfwGetWindowParam( GLFW_WINDOW ) )
	{
		glDrawArrays( GL_TRIANGLES, 0, 3);
		glfwSwapBuffers();
	}
	glDeleteProgram( shaderProgram );
	glDeleteShader( vertexShader );
	glDeleteShader( fragmentShader );
	glDeleteBuffers(1,  & vbo );
	glDeleteVertexArrays(1, &vao );
	glfwTerminate();
	return 0;
}
Advertisement

Where exactly is something wrong? Saying "something wrong is here" does not help. Can you be a bit more specific?

There's missing () after glGetError. As is it's not getting called (what you have there is a comparison of the function's address).

New C/C++ Build Tool 'Stir' (doesn't just generate Makefiles, it does the build): https://github.com/space222/stir

That doesn't change anything. When I run the application, a debug error appears: "Invalid allocation size: 4294967295 bytes".

untitled.png

Since you are using shaders ... post them.
[size="1"]I don't suffer from insanity, I'm enjoying every minute of it.
The voices in my head may not be real, but they have some good ideas!

SimpleFragmentShader


#version 150

// Final colour written to the default framebuffer; bound to colour
// attachment 0 via glBindFragDataLocation(program, 0, "outColor").
out vec4 outColor;

void main()
{
    // Solid white, fully opaque.
    outColor = vec4( 1.0, 1.0, 1.0, 1.0 );
}

SimpleVertexShader


#version 150

// 2D position in normalized device coordinates, fed from the VBO
// via glVertexAttribPointer on the "position" attribute.
in vec2 position;

void main()
{
    // Expand to a 4D clip-space position: z = 0, w = 1 (no projection).
    gl_Position = vec4( position, 0.0, 1.0 );
}

and function for loading them


// Reads an entire shader source file and returns it as a string.
//
// Returns an empty string when the file cannot be opened or is empty.
// BUG FIX: the original never checked that the file opened; on failure
// tellg() returns -1, which was converted to a vector size of
// (size_t)-1 and produced the reported crash
// "Invalid allocation size: 4294967295 bytes".
std::string loadShaderFile(const std::string fileName)
{
    std::ifstream shaderFile( fileName.c_str() );
    if (!shaderFile)
        return std::string();   // missing/unreadable file -> empty source

    // Find the file size by seeking to the end.
    shaderFile.seekg(0, std::ios::end);
    const std::streampos length = shaderFile.tellg();
    if (length <= 0)            // empty file, or tellg() failed
        return std::string();
    shaderFile.seekg(0, std::ios::beg);

    // Read the whole file into a buffer; size the result by the number of
    // characters actually read (gcount), so a text-mode short read does
    // not leave NUL padding at the end of the string.
    std::vector<char> buffer(static_cast<std::size_t>(length));
    shaderFile.read(&buffer[0], length);
    return std::string(&buffer[0], static_cast<std::size_t>(shaderFile.gcount()));
}

I think something is wrong around line 53, but I'm a beginner at OpenGL.

Check that the functions you're using are returning valid data.


const GLchar * tempVShader = vShaderString.c_str();

Make sure "tempVShader" has a valid string.


char  vsbuffer[512];
glGetShaderInfoLog( vertexShader, 512, NULL, vsbuffer );
logs << vsbuffer;

Make sure "vsbuffer" has valid data and is NULL terminated.


const GLchar * tempFShader = fShaderString.c_str();

Check "tempFShader" is valid.



char  fsbuffer[512];
glGetShaderInfoLog( fragmentShader, 512, NULL, fsbuffer );
logs << fsbuffer;

Make sure "fsbuffer" has valid data and is NULL terminated.

And to be totally safe, logging should be used as follows, and after the call to glCompileShader()


GLint retParam;
glGetShaderiv(m_openGLID, GL_COMPILE_STATUS, &retParam);
if(retParam != GL_TRUE) 
{
	GLint errorMsgLen = 0;
	glGetShaderiv(m_openGLID, GL_INFO_LOG_LENGTH, &errorMsgLen);
	if (errorMsgLen > 0)
	{
		GLsizei charsWritten = 0;
		GLchar *errorMsg = new GLchar[errorMsgLen];
		glGetShaderInfoLog(m_openGLID, errorMsgLen, &charsWritten, errorMsg);

                /*Do something with errorMsg*/
	}
}
The console and the logs say that both shaders compiled successfully and the program linked successfully, but I still get the same error. Maybe I made a mistake creating the window.

Solved it just needed to change in loop GLFW_WINDOW to GLFW_OPENED

This topic is closed to new replies.

Advertisement