X11 + OpenGL causes segfault

21 comments, last by KarimIO 8 years ago

I'm having issues getting X11 to work with OpenGL, and I've tried everything! It used to work with GLFW, but I'm trying to get it working with GLX instead; the Win32 path currently works, so the problem is clearly system-specific. Why does the program segfault when it reaches glClear, glClearColor, or glGetString? Thanks in advance!

All of this sits inside a function, Initialize(), of a class X11Window, and barely any code runs before it. I have the NVIDIA drivers installed.
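The snippet below also relies on a glXCreateContextAttribsARBProc typedef and an isExtensionSupported helper that I haven't pasted; they follow the usual GLX_ARB_create_context pattern, roughly like this (my exact definitions may differ slightly):

#include <cstring>
#include <GL/glx.h>

// Prototype for glXCreateContextAttribsARB, as defined by GLX_ARB_create_context.
typedef GLXContext (*glXCreateContextAttribsARBProc)(Display*, GLXFBConfig,
                                                     GLXContext, Bool, const int*);

// Returns true if 'extension' appears as a full token in the space-separated
// GLX extension string 'extList'.
static bool isExtensionSupported(const char* extList, const char* extension) {
    if (extension == nullptr || *extension == '\0' || std::strchr(extension, ' '))
        return false;
    for (const char* start = extList;;) {
        const char* where = std::strstr(start, extension);
        if (!where)
            return false;
        const char* terminator = where + std::strlen(extension);
        if ((where == start || *(where - 1) == ' ') &&
            (*terminator == ' ' || *terminator == '\0'))
            return true;
        start = terminator;
    }
}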

display = XOpenDisplay(NULL);
if (display == NULL) {
    std::cout << "Could not open display\n";
    return 1;
}
screen = DefaultScreenOfDisplay(display);
screenId = DefaultScreen(display);


// Check GLX version
GLint majorGLX = 0, minorGLX = 0;
glXQueryVersion(display, &majorGLX, &minorGLX);
if (majorGLX <= 1 && minorGLX < 2) {
    std::cout << "GLX 1.2 or greater is required.\n";
    XCloseDisplay(display);
    return 1;
}
else {
    std::cout << "GLX client version: " << glXGetClientString(display, GLX_VERSION) << '\n';
    std::cout << "GLX client vendor: " << glXGetClientString(display, GLX_VENDOR) << "\n";
    std::cout << "GLX client extensions:\n\t" << glXGetClientString(display, GLX_EXTENSIONS) << "\n";

    std::cout << "GLX server version: " << glXQueryServerString(display, screenId, GLX_VERSION) << "\n";
    std::cout << "GLX server vendor: " << glXQueryServerString(display, screenId, GLX_VENDOR) << "\n";
    std::cout << "GLX server extensions:\n\t " << glXQueryServerString(display, screenId, GLX_EXTENSIONS) << "\n";
}

GLint glxAttribs[] = {
    GLX_X_RENDERABLE , True,
    GLX_DRAWABLE_TYPE , GLX_WINDOW_BIT,
    GLX_RENDER_TYPE , GLX_RGBA_BIT,
    GLX_X_VISUAL_TYPE , GLX_TRUE_COLOR,
    GLX_RED_SIZE , 8,
    GLX_GREEN_SIZE , 8,
    GLX_BLUE_SIZE , 8,
    GLX_ALPHA_SIZE , 8,
    GLX_DEPTH_SIZE , 24,
    GLX_STENCIL_SIZE , 8,
    GLX_DOUBLEBUFFER , True,
    None
};

int fbcount;
GLXFBConfig* fbc = glXChooseFBConfig(display, screenId, glxAttribs, &fbcount);
if (fbc == 0) {
    std::cout << "Failed to retrieve framebuffer.\n";
    XCloseDisplay(display);
    return 1;
}
std::cout << "Found " << fbcount << " matching framebuffers.\n";

// Pick the FB config/visual with the most samples per pixel
std::cout << "Getting best XVisualInfo\n";
int best_fbc = -1, worst_fbc = -1, best_num_samp = -1, worst_num_samp = 999;
for (int i = 0; i < fbcount; ++i) {
    XVisualInfo *vi = glXGetVisualFromFBConfig( display, fbc[i] );
    if ( vi != 0) {
        int samp_buf, samples;
        glXGetFBConfigAttrib( display, fbc[i], GLX_SAMPLE_BUFFERS, &samp_buf );
        glXGetFBConfigAttrib( display, fbc[i], GLX_SAMPLES , &samples );
        //std::cout << " Matching fbconfig " << i << ", SAMPLE_BUFFERS = " << samp_buf << ", SAMPLES = " << samples << ".\n";

        if ( best_fbc < 0 || (samp_buf && samples > best_num_samp) ) {
            best_fbc = i;
            best_num_samp = samples;
        }
        if ( worst_fbc < 0 || !samp_buf || samples < worst_num_samp ) {
            worst_fbc = i;
            worst_num_samp = samples;
        }
    }
    XFree( vi );
}
std::cout << "Best visual info index: " << best_fbc << "\n";
GLXFBConfig bestFbc = fbc[ best_fbc ];
XFree( fbc ); // Make sure to free this!

XVisualInfo* visual = glXGetVisualFromFBConfig( display, bestFbc );

if (visual == 0) {
    std::cout << "Could not create correct visual window.\n";
    XCloseDisplay(display);
    return 1;
}

if (screenId != visual->screen) {
    std::cout << "screenId(" << screenId << ") does not match visual->screen(" << visual->screen << ").\n";
    XCloseDisplay(display);
    return 1;
}

// Open the window
XSetWindowAttributes windowAttribs;
windowAttribs.border_pixel = BlackPixel(display, screenId);
windowAttribs.background_pixel = WhitePixel(display, screenId);
windowAttribs.override_redirect = True;
windowAttribs.colormap = XCreateColormap(display, RootWindow(display, screenId), visual->visual, AllocNone);
windowAttribs.event_mask = ExposureMask | KeyPressMask | KeyReleaseMask | KeymapStateMask | PointerMotionMask | ButtonPressMask | ButtonReleaseMask | EnterWindowMask | LeaveWindowMask;
window = XCreateWindow(display, RootWindow(display, screenId), 0, 0,
                       game.settings.resolution.x, game.settings.resolution.y,
                       0, visual->depth, InputOutput, visual->visual,
                       CWBackPixel | CWColormap | CWBorderPixel | CWEventMask, &windowAttribs);

// Create GLX OpenGL context
glXCreateContextAttribsARBProc glXCreateContextAttribsARB = 0;
glXCreateContextAttribsARB = (glXCreateContextAttribsARBProc) glXGetProcAddressARB( (const GLubyte *) "glXCreateContextAttribsARB" );

const char *glxExts = glXQueryExtensionsString( display, screenId );
std::cout << "Late extensions:\n\t" << glxExts << "\n\n";
if (glXCreateContextAttribsARB == 0) {
    std::cout << "glXCreateContextAttribsARB() not found.\n";
}

int context_attribs[] = {
    GLX_CONTEXT_MAJOR_VERSION_ARB, 3,
    GLX_CONTEXT_MINOR_VERSION_ARB, 3,
    GLX_CONTEXT_FLAGS_ARB, 0,
    GLX_CONTEXT_PROFILE_MASK_ARB, GLX_CONTEXT_CORE_PROFILE_BIT_ARB,
    None
};

GLXContext context = 0;
if (!isExtensionSupported( glxExts, "GLX_ARB_create_context")) {
    context = glXCreateNewContext( display, bestFbc, GLX_RGBA_TYPE, 0, True );
}
else {
    context = glXCreateContextAttribsARB( display, bestFbc, 0, true, context_attribs );
}
XSync( display, False );

// Verifying that context is a direct context
if (!glXIsDirect(display, context)) {
    std::cout << "Indirect GLX rendering context obtained\n";
}
else {
    std::cout << "Direct GLX rendering context obtained\n";
}
glXMakeCurrent(display, window, context);

game.HandleMessage(ENGINE_MESSAGE_CONSOLE, ENGINE_MESSAGE_OUTPUT, "TEST: Made Current...\n");


std::cout << "GL Vendor: " << glGetString(GL_VENDOR) << "\n";
std::cout << "GL Renderer: " << glGetString(GL_RENDERER) << "\n";
std::cout << "GL Version: " << glGetString(GL_VERSION) << "\n";
std::cout << "GL Shading Language: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << "\n";


// Show the window
XClearWindow(display, window);
XMapRaised(display, window);
game.HandleMessage(ENGINE_MESSAGE_CONSOLE, ENGINE_MESSAGE_OUTPUT, "TEST: Made ClearWindow...\n");

// Set GL Sample stuff
glClearColor(0.5f, 0.6f, 0.7f, 1.0f);
game.HandleMessage(ENGINE_MESSAGE_CONSOLE, ENGINE_MESSAGE_OUTPUT, "TEST:ClearColor...\n");


// Resize window
/*unsigned int change_values = CWWidth | CWHeight;
XWindowChanges values;
values.width = game.settings.resolution.x;
values.height = game.settings.resolution.y;
XConfigureWindow(display, window, change_values, &values);*/


// Enter message loop
while (true) {
    ReadEvents();
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}

// Cleanup
XDestroyWindow(display, window);
XCloseDisplay(display);

game.HandleMessage(ENGINE_MESSAGE_CONSOLE, ENGINE_MESSAGE_OUTPUT, "TEST: X11 Window Created...\n");
return true;


glXMakeCurrent also has a return code, and it looks like it's the one call whose return value you don't check.. :)

Probably the context isn't properly made current there.

If it is, try getting function pointers for the normal GL functions yourself instead of relying on the ones resolved at link time.
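Something like this (just a sketch):

// Bail out early if making the context current fails, before touching any GL call.
if (!glXMakeCurrent(display, window, context)) {
    std::cout << "glXMakeCurrent failed\n";
    XCloseDisplay(display);
    return 1;
}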


Thanks for the input Erik! It returned true, but at least now I know of a place to test for any future bugs :P So it's still not working. Any other ideas?

Did you try glXGetProcAddress to get the glGetString function pointer instead of calling it by its prototype?

(If I understand correctly and the first glGetString call to get GL_VENDOR crashes)
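Something along these lines (a sketch; the local typedef name is just for illustration):

// Fetch glGetString through GLX instead of the symbol resolved at link time.
typedef const GLubyte* (*GetStringProc)(GLenum name);
GetStringProc pGetString =
    (GetStringProc)glXGetProcAddress((const GLubyte*)"glGetString");
if (pGetString != nullptr)
    std::cout << "GL Vendor: " << (const char*)pGetString(GL_VENDOR) << "\n";
else
    std::cout << "glGetString not found via glXGetProcAddress\n";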


That wouldn't explain the issue with glClearColor or glClear, though, right? :S

Should be very easy to test, and it might, though I'm not sure I understand exactly what happens.

Does it always crash every time on the first glGetString if that call is there, and crashes on glClearColor instead if you comment out the glGetString?

Or do you mean that it randomly crashes on some GL call somewhere, but sometimes executes glGetString and properly prints it before crashing on glClearColor?

If it always crashes on the first GL call, whatever that call may be, even though MakeCurrent returns true, it seems likely that the function isn't reaching the proper driver.. though that's just a guess.

If it is random, it seems more likely that some memory corruption leads to a crash at an arbitrary later point.

You might also want to check your std::cout as I believe glGetString returns unsigned byte pointers.. if that is the case then they won't print a string (though it shouldn't crash as they should just print an address instead.. but you could cast them to char* and see if something changes).
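For example:

// glGetString returns const GLubyte*; cast to const char* when streaming it.
const GLubyte* vendor = glGetString(GL_VENDOR);
std::cout << "GL Vendor: " << (vendor ? (const char*)vendor : "(null)") << "\n";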

Erik, I mean it crashes on the first OpenGL call. I'm going to look into the MakeCurrent return issue, though I have no idea why that would be a problem :S

Again, I suggest calling glXGetProcAddress("glGetString"), checking that the returned function pointer is not null, and, if it isn't, calling through that instead of the regular glGetString. (Provided glXGetProcAddress doesn't crash instead; if it does, reinstall the driver.)

And first maybe try glXMakeContextCurrent instead of glXMakeCurrent.

Also, I see now that you don't actually check that the 'context' you create isn't NULL. MakeCurrent could return True for a NULL context, since that just releases the thread's current context.
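Roughly (a sketch):

// Make sure the context was actually created before using it.
if (context == NULL) {
    std::cout << "Failed to create GLX context\n";
    XCloseDisplay(display);
    return 1;
}
// glXMakeContextCurrent (GLX 1.3) takes separate draw and read drawables.
if (!glXMakeContextCurrent(display, window, window, context)) {
    std::cout << "glXMakeContextCurrent failed\n";
    XCloseDisplay(display);
    return 1;
}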

glXGetProcAddress((const GLubyte *)"glClearColor") and glXGetProcAddress((const GLubyte *)"glGetString") both return non-NULL pointers.

