Hi everyone.
I'm facing a quite strange problem.
I'm initializing my OpenGL context with SDL2-2.0.3, forcing it to use the core profile ( 3.3 ), also I am using GLEW for extensions. I am using Linux Mint 16 with stock kernel 3.11.0-12-generic with proprietary nvidia drivers from the repos ( version 319.32 ).
I've got the same setup on two different machines:
Lenovo V580c laptop with GeForce 740M
and
PC with GeForce GT 630.
Also, I've got another PC with AMD Radeon HD 6670 card ( OS - Debian + opensource driver )
My OpenGL application for now renders only one rotating triangle in perspective projection with an FPS-like camera floating around.
The problem is that I'm not seeing anything on my GF 630 ( on the second PC ).
The 740M is doing things just fine, as is the HD6670. Yet, on the GT630 I'm getting a blank screen with a properly cleared color buffer and depth buffer.
Here's my init code:
void Window::Create() {
// init sdl2
if ( SDL_Init( SDL_INIT_VIDEO | SDL_INIT_TIMER | SDL_INIT_EVENTS ) ) {
Logger::WriteLog( LOG_S_ERROR, "Unable to init SDL.");
exit( -1 );
}
// set opengl 3.3
SDL_GL_SetAttribute( SDL_GL_CONTEXT_MAJOR_VERSION, 3 );
SDL_GL_SetAttribute( SDL_GL_CONTEXT_MINOR_VERSION, 3 );
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );
SDL_GL_SetAttribute( SDL_GL_DEPTH_SIZE, 24 );
// create window
Uint32 flags = SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL;
window = SDL_CreateWindow( title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, width, height, flags );
if ( 0 == window ) {
Logger::WriteLog( LOG_S_ERROR, "Unable to create SDL window.");
exit( -1 );
}
glContext = SDL_GL_CreateContext( window );
// init glew
glewExperimental = true;
if ( GLEW_OK != glewInit() ) {
Logger::WriteLog( LOG_S_ERROR, "Unable to init GLEW." );
exit( -1 );
}
HideCursor( true );
WarpCursorXY( width / 2, height / 2 );
}
This one inits some default states
// Sets the renderer's default fixed GL state: clear color, depth test
// and back-face culling. Called once after context creation.
void RenderingEngine::InitGLDefaults() {
    // Dark-blue clear color (RGBA).
    glClearColor( 0.0f, 0.0f, 0.4f, 1.0f );
    // Depth testing: keep the fragment nearest to the camera.
    glDepthFunc( GL_LESS );
    glEnable( GL_DEPTH_TEST );
    // Discard triangles facing away from the camera.
    glCullFace( GL_BACK );
    glEnable( GL_CULL_FACE );
}
This is how I render meshes:
void RenderingEngine::RenderMesh( const Mesh& mesh ) {
glEnableVertexAttribArray( 0 ); // vertices
glEnableVertexAttribArray( 1 ); // texture coords
glEnableVertexAttribArray( 2 ); // normals
glBindBuffer( GL_ARRAY_BUFFER, mesh.vbo );
glVertexAttribPointer( 0, 3, GL_FLOAT, false, VERTEX_COMPONENTS * sizeof( float ), 0 ); // vert coords
glVertexAttribPointer( 1, 2, GL_FLOAT, false, VERTEX_COMPONENTS * sizeof( float ), (char*) (3 * sizeof( float )) ); // tex coords
glVertexAttribPointer( 2, 3, GL_FLOAT, false, VERTEX_COMPONENTS * sizeof( float ), (char*) (( 3 + 2 ) * sizeof( float )) ); // normals
glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, mesh.ibo );
glDrawElements( GL_TRIANGLES, mesh.drawSize, GL_UNSIGNED_INT, 0 );
glDisableVertexAttribArray( 0 );
glDisableVertexAttribArray( 1 );
glDisableVertexAttribArray( 2 );
}
Here is a part of a main cycle, which involves rendering:
void Application::Render() {
render::RenderingEngine::RenderClear();
testShader.Bind();
glm::mat4 modelMatrix = testTransform.GetModelMatrix();
glm::mat4 mvp = testCamera.GetViewProjection() * modelMatrix;
testShader.SetUniformMat4( "MVP", mvp );
render::RenderingEngine::RenderMesh( testMesh );
testShader.Unbind();
SDL_GL_SwapWindow( window.window );
}
Again: this works on 740M and HD6670, but draws a clean screen on GT630.
Halp!