Jump to content
  • Advertisement

globalhost

Member
  • Content Count

    7
  • Joined

  • Last visited

Everything posted by globalhost

  1. #include <iostream> #include <string> #include <vector> #include <GL/glew.h> #include <GLFW/glfw3.h> #include <glm/glm.hpp> static void callbackError(int error, const char* description) { fputs(description, stderr); } int main(void) { GLFWwindow* m_window; glfwSetErrorCallback(callbackError); if (!glfwInit()) std::cout << "??????: glfwInit()" << std::endl; /*glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3); glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);*/ m_window = glfwCreateWindow(640, 480, "Test", NULL, NULL); if (!m_window) { glfwTerminate(); std::cout << "??????: glfwCreateWindow()" << std::endl; } glfwMakeContextCurrent(m_window); glewExperimental = GL_TRUE; GLenum glewError = glewInit(); if (glewError != GLEW_OK) { glfwTerminate(); exit(EXIT_FAILURE); } glViewport(0, 0, 640, 480); int oglMajor, oglMinor; glGetIntegerv(GL_MAJOR_VERSION, &oglMajor); glGetIntegerv(GL_MINOR_VERSION, &oglMinor); std::string ogl_vers = std::to_string(oglMajor) + "." + std::to_string(oglMinor); std::cout << std::endl << " ?????? OpenGL: " << ogl_vers << std::endl; GLuint VBO; glGenBuffers(1, &VBO); std::vector<glm::vec3> m_bufferDataPoints; m_bufferDataPoints.push_back(glm::vec3(0.0, 0.0, 0.0)); while (!glfwWindowShouldClose(m_window)) { glClear(GL_COLOR_BUFFER_BIT); glBindBuffer(GL_ARRAY_BUFFER, VBO); glBufferData(GL_ARRAY_BUFFER, m_bufferDataPoints.size() * sizeof(glm::vec3), &m_bufferDataPoints[0], GL_DYNAMIC_DRAW); glEnableVertexAttribArray(0); glBindBuffer(GL_ARRAY_BUFFER, VBO); glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (GLvoid*) 0); glDrawArrays(GL_POINTS, 0, m_bufferDataPoints.size()); glDisableVertexAttribArray(0); glfwSwapBuffers(m_window); glfwPollEvents(); } std::cout << std::endl << std::endl; m_bufferDataPoints.clear(); glDeleteBuffers(1, &VBO); glfwDestroyWindow(m_window); glfwTerminate(); } As a result, code execution is displayed point. 
The console outputs "OpenGL 3.0". When I add glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3); glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); I do not see anything (black screen). The console outputs "OpenGL 3.3", and there are no errors.
  2.   Yes!   glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_width, m_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);   Replaced by:   glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 512, 512, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);   And now it works! Thank you! 
  3. I initialize framebuffer. Then, in a cycle I render the scene to a texture, treat its shader and deduce the screen. On my PC everything is OK. (Radeon HD 7870.) On another PC (GeForce FX 5200) the function glCheckFramebufferStatusEXT returns error "8cdd" and render a black screen with a framerate of 0-1 fps. Source code: #include "main.hpp" GLuint fbo, fbo_texture, rbo_depth; GLuint vbo_fbo_vertices; GLuint program_postproc, attribute_v_coord_postproc, uniform_fbo_texture; GLuint vs, fs; Shader shader; int main(void) { init(); glGenFramebuffersEXT(1, &fbo); glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo); glEnable(GL_TEXTURE_2D); glGenTextures(1, &fbo_texture); glBindTexture(GL_TEXTURE_2D, fbo_texture); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_width, m_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, fbo_texture, 0); GLenum status; if ((status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT)) != GL_FRAMEBUFFER_COMPLETE_EXT) { fprintf(stderr, "glCheckFramebufferStatus: error %p", status); } glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0); GLfloat fbo_vertices[] = { -1, -1, 1, -1,-1, 1, 1, 1 }; glGenBuffers(1, &vbo_fbo_vertices); glBindBuffer(GL_ARRAY_BUFFER, vbo_fbo_vertices); glBufferData(GL_ARRAY_BUFFER, sizeof(fbo_vertices), fbo_vertices, GL_STATIC_DRAW); glBindBuffer(GL_ARRAY_BUFFER, 0); shader.load("shaders/post_processing.vert", "shaders/post_processing.frag"); attribute_v_coord_postproc = glGetAttribLocation(shader.program(), "v_coord"); if (attribute_v_coord_postproc == -1) { fprintf(stderr, "Could not bind attribute %s\n", "v_coord"); return 0; } uniform_fbo_texture = glGetUniformLocation(shader.program(), "fbo_texture"); if (uniform_fbo_texture == -1) { fprintf(stderr, "Could not bind uniform %s\n", "fbo_texture"); return 0; } while 
(!glfwWindowShouldClose(m_window)) { glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo); glClear(GL_COLOR_BUFFER_BIT); glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0); glClear(GL_COLOR_BUFFER_BIT); shader.use(); glBindTexture(GL_TEXTURE_2D, fbo_texture); glUniform1i(uniform_fbo_texture, /*GL_TEXTURE*/0); glEnableVertexAttribArray(attribute_v_coord_postproc); glBindBuffer(GL_ARRAY_BUFFER, vbo_fbo_vertices); glVertexAttribPointer(attribute_v_coord_postproc, 2, GL_FLOAT, GL_FALSE, 0, 0); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); glDisableVertexAttribArray(attribute_v_coord_postproc); glfwSwapBuffers(m_window); glfwPollEvents(); } glDeleteRenderbuffersEXT(1, &rbo_depth); glDeleteTextures(1, &fbo_texture); glDeleteFramebuffersEXT(1, &fbo); glDeleteBuffers(1, &vbo_fbo_vertices); glDeleteProgram(shader.program()); glfwDestroyWindow(m_window); glfwTerminate(); exit(EXIT_SUCCESS); } void callbackError(int error, const char* description) { fputs(description, stderr); } void callbackKey(GLFWwindow* window, int key, int scancode, int action, int mods) { if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS) glfwSetWindowShouldClose(window, GL_TRUE); } void callbackFramebufferSize(GLFWwindow* window, int width, int height) { m_width = width; m_height = height; glBindTexture(GL_TEXTURE_2D, fbo_texture); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_width, m_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); glBindTexture(GL_TEXTURE_2D, 0); } void init() { glfwSetErrorCallback(callbackError); if (!glfwInit()) exit(EXIT_FAILURE); m_width = 800; m_height = 600; m_window = glfwCreateWindow(m_width, m_height, "Framebuffer Test", NULL, NULL); if (!m_window) { glfwTerminate(); exit(EXIT_FAILURE); } glfwMakeContextCurrent(m_window); glfwSwapInterval(0); glfwSetKeyCallback(m_window, callbackKey); glfwSetFramebufferSizeCallback(m_window, callbackFramebufferSize); glewExperimental = GL_TRUE; if (glewInit() != GLEW_OK) std::cout << "GLEW Init Error" << std::endl; glClearColor(0.2, 0.3, 0.4, 1.0); glEnable(GL_BLEND); 
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); } Result: http://itmag.es/4KxQ5/ http://itmag.es/H5RD/    
  4. globalhost

    FTGL artefact

    Source code: #include <GLFW/glfw3.h> #include <SOIL/SOIL.h> #include <FTGL/ftgl.h> #include <stdlib.h> #include <stdio.h> FTFont *m_font1, *m_font2; int width = 800, height = 600; static void error_callback(int error, const char* description) { fputs(description, stderr); } static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods) { if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS) glfwSetWindowShouldClose(window, GL_TRUE); } int main(void) { GLFWwindow* window; glfwSetErrorCallback(error_callback); if (!glfwInit()) exit(EXIT_FAILURE); window = glfwCreateWindow(800, 600, "Simple example", NULL, NULL); if (!window) { glfwTerminate(); exit(EXIT_FAILURE); } glfwMakeContextCurrent(window); glfwSwapInterval(1); glfwSetKeyCallback(window, key_callback); glEnable (GL_BLEND); glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); glEnable(GL_TEXTURE_2D); // ???????? ????????. GLuint tex_2d = SOIL_load_OGL_texture ( "object.png", SOIL_LOAD_AUTO, SOIL_CREATE_NEW_ID, SOIL_FLAG_MIPMAPS | SOIL_FLAG_INVERT_Y | SOIL_FLAG_NTSC_SAFE_RGB | SOIL_FLAG_COMPRESS_TO_DXT ); m_font1 = new FTBufferFont("fonts/Linux Biolinum/LinBiolinum_R.otf"); m_font2 = new FTBufferFont("fonts/Linux Biolinum/LinBiolinum_R.otf"); if (m_font1->Error()) return -1; if (m_font2->Error()) return -1; m_font1->FaceSize(20); m_font2->FaceSize(16); glClearColor(0.4, 0.5, 0.6, 1.0); glViewport(0, 0, width, height); glMatrixMode(GL_PROJECTION); glLoadIdentity(); glOrtho(0.0, width, 0.0, height, 1, -1); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); while (!glfwWindowShouldClose(window)) { glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT ); glTranslatef(0.1, 0.1, 0.0); glColor3f(1.0f, 1.0f, 1.0f); glBindTexture(GL_TEXTURE_2D, tex_2d); glBegin(GL_QUADS); glTexCoord2f (0.0f, 0.0f); glVertex3f(200.0, 200.0, 0); glTexCoord2f (1.0f, 0.0f); glVertex3f(500.0, 200.0, 0); glTexCoord2f (1.0f, 1.0f); glVertex3f(500.0, 500.0, 0); glTexCoord2f (0.0f, 1.0f); glVertex3f(200.0, 500.0, 0); glEnd(); 
glColor3f(1.0f, 1.0f, 1.0f); for (int i = 0; i < 1; ++i) m_font1->Render("Testing FTGL", -1, FTPoint(50, 50, 0)); glfwSwapBuffers(window); glfwPollEvents(); } glfwDestroyWindow(window); glfwTerminate(); exit(EXIT_SUCCESS); } The text in screenshot 1 looks good. (The program is run on the graphics card Radeon HD 7870.) The text in screenshot 1 looks bad. (The program is run on the graphics card Geforce2 MX 100 / 200.)   What needs to be fixed?    
  5. Main function

// Stress test: draws a 16x12 grid of FPS labels with an FTGL texture font
// and measures the frame rate.
int main() {
    init();
    while (!glfwWindowShouldClose(window)) {
        glClear(GL_COLOR_BUFFER_BIT);

        // Update the FPS counter once per second.
        currentTime = glfwGetTime();
        ++frames;
        if (currentTime - lastTime >= 1.0) {
            fps = frames;
            frames = 0;
            lastTime += 1.0;
        }

        glPushAttrib(GL_TEXTURE_BIT);
        glColor3f(1.0f, 1.0f, 1.0f);
        // PERF FIX: the text is identical for all 192 labels, so convert the
        // number (and allocate its std::string) once per frame instead of
        // once per label as the original did.
        const std::string fpsText = std::to_string(fps);
        for (int j = 0; j < 16; ++j)
            for (int i = 0; i < 12; ++i)
                font->Render(fpsText.c_str(), -1,
                             FTPoint(i * 60, height - j * 30 - font->LineHeight(), 0));
        glPopAttrib();

        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwDestroyWindow(window);
    glfwTerminate();
}

Create font

FTFont *font;
font = new FTTextureFont("fonts/Linux Biolinum/LinBiolinum_R.otf");
font->FaceSize(20);

The problem: as you can see in the screenshots, the FPS drops dramatically as the number of labels increases!
  6. globalhost

    OpenGL + FTGL. Falling productivity

    Good. So the optimization is not required. Thank you all for your help!
  7. globalhost

    OpenGL + FTGL. Falling productivity

    It's very slow. On my Radeon HD 7870 video card it reaches 172 frames per second. How many frames would it get on a graphics card released in the year 2000? Scary to imagine.
  • Advertisement
×

Important Information

By using GameDev.net, you agree to our community Guidelines, Terms of Use, and Privacy Policy.

GameDev.net is your game development community. Create an account for your GameDev Portfolio and participate in the largest developer community in the games industry.

Sign me up!