I’m working on a home project — a small graphics engine written in C++11 using GLFW for windowing — but I ran into vsync-related stuttering that I have been unable to fix. Over the last few weeks I’ve spent countless hours trying to understand what might be wrong in the game loop below, but I just can’t figure it out. Even stranger, on a system with Intel HD Graphics 4000 the animation is butter smooth even with vsync enabled, yet on my home PC with an NVIDIA GTX 480 I see a lot of animation stutter.
Game Loop:
/// @brief Main loop: fixed-timestep simulation with render interpolation
/// (the classic "Fix Your Timestep" structure). The simulation always
/// advances in fixed dt = 1/granularity steps; rendering happens once per
/// loop iteration and blends the two most recent simulation states.
void Engine::run()
{
/// Re-entrancy guard: a second run() call while already running is a no-op.
if (isRunning_)
return;
isRunning_ = true;
initialize();
/// Read once up front; assumes vsync is not toggled while the loop runs —
/// TODO confirm that is acceptable.
bool vSyncEnabled = window_->getVSyncEnabled();
/// frames/frameCounter are incremented below but never read in the code
/// visible here — presumably FPS bookkeeping used by code elided from this
/// excerpt; verify against the full file.
int frames = 0;
double frameCounter = 0.0;
/// Fixed simulation rate: 480 steps per second (dt = 1/480 s).
int granularity = 480;
State current = {};
current.time.t_ = 0.0;
current.time.dt_ = 1.0 / granularity;
current.time.accumulator_ = 0.0;
State previous = current;
/// Seed the previous timestamp so the first iteration's elapsed time is
/// measured from here rather than from zero.
previous.time.start_ = time_->getTime();
while (isRunning_)
{
/// Measure real wall-clock time elapsed since the previous iteration.
current.time.start_ = time_->getTime();
current.time.elapsed_ = current.time.start_ - previous.time.start_;
previous.time.start_ = current.time.start_;
/// With vsync on, frame deltas are quantized by the display refresh but
/// jittered by OS scheduling; smoothDelta() (defined elsewhere) presumably
/// snaps the measured delta toward a nominal interval — confirm its
/// behavior, as bad smoothing here is a classic cause of visible stutter.
if (vSyncEnabled)
smoothDelta(current.time.elapsed_, granularity);
/// Clamp pathological deltas (debugger pause, window drag) so the
/// simulation never tries to catch up more than 0.25 s in one frame —
/// prevents the "spiral of death".
if (current.time.elapsed_ > 0.25)
current.time.elapsed_ = 0.25;
current.time.accumulator_ += current.time.elapsed_;
frameCounter += current.time.elapsed_;
///-- Update
/// Consume the accumulated real time in fixed dt-sized simulation steps.
while (current.time.accumulator_ >= current.time.dt_)
{
current.time.accumulator_ -= current.time.dt_;
/// Snapshot the pre-step state so the renderer can interpolate between
/// it and the state produced by this step.
previous = current;
renderManager_->update(current);
current.time.t_ += current.time.dt_;
}
///--
///-- Render
/// The leftover accumulator (0 <= accumulator < dt) determines the blend
/// factor alpha = accumulator/dt used inside interpolate().
renderManager_->interpolate(previous, current);
renderManager_->render();
window_->swapBuffers();
frames++;
///--
/// NOTE(review): "poolEvents" is presumably the project's spelling of
/// pollEvents (GLFW event pump) — defined elsewhere, so left as-is.
window_->poolEvents();
if ( window_->windowShouldClose() )
{
stop();
}
}
terminate();
}
Render manager test code:
...
/// Advances the test triangle by one fixed simulation step, bouncing it
/// horizontally between -0.8 and 0.8.
void GlRenderManager::update(const State& current)
{
    // Remember where the triangle was so interpolate() can blend between
    // the two most recent simulation states.
    prevTriangle_ = currTriangle_;

    // Reverse direction at the horizontal limits.
    if (currTriangle_.x_ >= 0.8)
    {
        currTriangle_.dir_ = -1;
    }
    else if (currTriangle_.x_ <= -0.8)
    {
        currTriangle_.dir_ = 1;
    }

    // Move by one fixed time step in the current direction.
    currTriangle_.x_ += currTriangle_.dir_ * current.time.dt_;
}
/// Produces the triangle actually drawn this frame by blending the two most
/// recent simulation states. alpha is the fraction of a fixed step left in
/// the accumulator, i.e. how far "between" prev and curr the rendered frame
/// falls. (The State parameters are unused here because this manager keeps
/// its own prev/curr triangle copies.)
void GlRenderManager::interpolate(const State& previous, const State& current)
{
    double alpha = current.time.accumulator_ / current.time.dt_;

    if (alpha > 0.0)
    {
        // Linear blend between the previous and current simulation states.
        triangle_.x_ = currTriangle_.x_ * alpha + prevTriangle_.x_ * (1.0 - alpha);
        return;
    }

    // Simulation landed exactly on a frame boundary — render it as-is.
    triangle_ = currTriangle_;
}
/// Clears the framebuffer and draws the interpolated test triangle.
/// Fix: replaced the C-style cast `(float)triangle_.x_ * 1.0f` with a
/// `static_cast` and dropped the redundant `* 1.0f` multiply.
void GlRenderManager::render()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Generic vertex attribute 0: only the x component carries the
    // interpolated horizontal offset; y/z/w stay at 0.
    GLfloat attrib[] = { static_cast<GLfloat>(triangle_.x_),
                         0.0f,
                         0.0f,
                         0.0f };
    // Update the value of input attribute 0
    glVertexAttrib4fv(0, attrib);

    // NOTE(review): GL_PATCHES assumes a tessellation pipeline is bound and
    // the patch vertex count is set to 3 (glPatchParameteri) elsewhere —
    // confirm, since a mismatch here silently draws nothing.
    glDrawArrays(GL_PATCHES, 0, 3);

    checkErrors(); ///-- Check for OpenGL errors.
}
...
Any help would be appreciated.