This is my code
#include <GL/glee.h>
#include <GL/glfw.h>
#include <stdio.h>

/* Minimal GLFW 2.x + GLee repro: open an OpenGL 3.3 window, create two
 * shader objects, and print glGetError() after each step.
 * Returns 0 on success, -1 if GLFW init or window creation fails. */
int main()
{
    if (!glfwInit()) {
        printf("Error initializing GLFW\n");
        return -1;
    }

    /* Request an OpenGL 3.3 context.
     * NOTE(review): GLee initializes itself lazily on the first extension
     * call and presumably queries glGetString(GL_EXTENSIONS), which is
     * invalid in a 3.x core context — that is the likely source of the
     * GL_INVALID_ENUM (0x500) observed after glCreateShader, rather than
     * glCreateShader itself. TODO: confirm against GLee's init code. */
    glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
    glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 3);

    if (glfwOpenWindow(800, 600, 8, 8, 8, 8, 0, 0, GLFW_WINDOW) != GL_TRUE) {
        glfwTerminate();
        printf("Error creating window\n");
        return -1;
    }

    printf("Window created OK, OpenGL %d.%d\n",
           glfwGetWindowParam(GLFW_OPENGL_VERSION_MAJOR),
           glfwGetWindowParam(GLFW_OPENGL_VERSION_MINOR));

    GLuint vertexId, pixelId;
    printf("%x ", glGetError());

    vertexId = glCreateShader(GL_VERTEX_SHADER);
    printf("%x ", glGetError());
    pixelId = glCreateShader(GL_FRAGMENT_SHADER);
    printf("%x ", glGetError());

    /* glCreateShader returns 0 on failure — check the handles, not just
     * the error queue (the original never inspected them). */
    if (vertexId == 0 || pixelId == 0) {
        printf("shader object creation failed\n");
    }

    bool running = true;
    while (running) {
        glViewport(0, 0, 800, 600);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glfwSwapBuffers();
        running = !glfwGetKey(GLFW_KEY_ESC) && glfwGetWindowParam(GLFW_OPENED);
    }

    /* Release the shader objects before tearing the context down
     * (fixes a GL object leak in the original). */
    if (vertexId) {
        glDeleteShader(vertexId);
    }
    if (pixelId) {
        glDeleteShader(pixelId);
    }

    glfwTerminate();
    return 0;
}
When I run it, the first glGetError() returns 0 (as expected), but the 2nd and 3rd return 0x500 (GL_INVALID_ENUM).
This is strange, since glCreateShader shouldn't raise that error unless the GL_VERTEX_SHADER constant were defined incorrectly, which I don't think it is.
If I change the context to 2.x, it works perfectly.
Does anybody have a clue as to why this is happening? Is it an incompatibility between GLFW and GLee?
My compiler line is g++ test.cpp -o test.exe -lglfw -lglee -lopengl32
Thanks,
Gonzalo
[Edited by - gzaloprgm on December 9, 2010 8:41:31 PM]