• Advertisement
  • Popular Tags

  • Popular Now

  • Advertisement
  • Similar Content

    • By Fadey Duh
      Good evening everyone!

      I was wondering if there is something equivalent of  GL_NV_blend_equation_advanced for AMD?
      Basically I'm trying to find more compatible version of it.

      Thank you!
    • By Jens Eckervogt
      Hello guys, 
       
      Please tell me! 
      How do I know? Why does wavefront not show for me?
      I already checked I have non errors yet.
      using OpenTK; using System.Collections.Generic; using System.IO; using System.Text; namespace Tutorial_08.net.sourceskyboxer { public class WaveFrontLoader { private static List<Vector3> inPositions; private static List<Vector2> inTexcoords; private static List<Vector3> inNormals; private static List<float> positions; private static List<float> texcoords; private static List<int> indices; public static RawModel LoadObjModel(string filename, Loader loader) { inPositions = new List<Vector3>(); inTexcoords = new List<Vector2>(); inNormals = new List<Vector3>(); positions = new List<float>(); texcoords = new List<float>(); indices = new List<int>(); int nextIdx = 0; using (var reader = new StreamReader(File.Open("Contents/" + filename + ".obj", FileMode.Open), Encoding.UTF8)) { string line = reader.ReadLine(); int i = reader.Read(); while (true) { string[] currentLine = line.Split(); if (currentLine[0] == "v") { Vector3 pos = new Vector3(float.Parse(currentLine[1]), float.Parse(currentLine[2]), float.Parse(currentLine[3])); inPositions.Add(pos); if (currentLine[1] == "t") { Vector2 tex = new Vector2(float.Parse(currentLine[1]), float.Parse(currentLine[2])); inTexcoords.Add(tex); } if (currentLine[1] == "n") { Vector3 nom = new Vector3(float.Parse(currentLine[1]), float.Parse(currentLine[2]), float.Parse(currentLine[3])); inNormals.Add(nom); } } if (currentLine[0] == "f") { Vector3 pos = inPositions[0]; positions.Add(pos.X); positions.Add(pos.Y); positions.Add(pos.Z); Vector2 tc = inTexcoords[0]; texcoords.Add(tc.X); texcoords.Add(tc.Y); indices.Add(nextIdx); ++nextIdx; } reader.Close(); return loader.loadToVAO(positions.ToArray(), texcoords.ToArray(), indices.ToArray()); } } } } } And It have tried other method but it can't show for me.  I am mad now. Because any OpenTK developers won't help me.
      Please help me how do I fix.

      And my download (mega.nz) should it is original but I tried no success...
      - Add blend source and png file here I have tried tried,.....  
       
      PS: Why is our community not active? I wait very longer. Stop to lie me!
      Thanks !
    • By codelyoko373
      I wasn't sure if this would be the right place for a topic like this so sorry if it isn't.
      I'm currently working on a project for Uni using FreeGLUT to make a simple solar system simulation. I've got to the point where I've implemented all the planets and have used a Scene Graph to link them all together. The issue I'm having with now though is basically the planets and moons orbit correctly at their own orbit speeds.
      I'm not really experienced with using matrices for stuff like this so It's likely why I can't figure out how exactly to get it working. This is where I'm applying the transformation matrices, as well as pushing and popping them. This is within the Render function that every planet including the sun and moons will have and run.
      if (tag != "Sun") { glRotatef(orbitAngle, orbitRotation.X, orbitRotation.Y, orbitRotation.Z); } glPushMatrix(); glTranslatef(position.X, position.Y, position.Z); glRotatef(rotationAngle, rotation.X, rotation.Y, rotation.Z); glScalef(scale.X, scale.Y, scale.Z); glDrawElements(GL_TRIANGLES, mesh->indiceCount, GL_UNSIGNED_SHORT, mesh->indices); if (tag != "Sun") { glPopMatrix(); } The "If(tag != "Sun")" parts are my attempts are getting the planets to orbit correctly though it likely isn't the way I'm meant to be doing it. So I was wondering if someone would be able to help me? As I really don't have an idea on what I would do to get it working. Using the if statement is truthfully the closest I've got to it working but there are still weird effects like the planets orbiting faster then they should depending on the number of planets actually be updated/rendered.
    • By Jens Eckervogt
      Hello everyone, 
      I have problem with texture
      using System; using OpenTK; using OpenTK.Input; using OpenTK.Graphics; using OpenTK.Graphics.OpenGL4; using System.Drawing; using System.Reflection; namespace Tutorial_05 { class Game : GameWindow { private static int WIDTH = 1200; private static int HEIGHT = 720; private static KeyboardState keyState; private int vaoID; private int vboID; private int iboID; private Vector3[] vertices = { new Vector3(-0.5f, 0.5f, 0.0f), // V0 new Vector3(-0.5f, -0.5f, 0.0f), // V1 new Vector3(0.5f, -0.5f, 0.0f), // V2 new Vector3(0.5f, 0.5f, 0.0f) // V3 }; private Vector2[] texcoords = { new Vector2(0, 0), new Vector2(0, 1), new Vector2(1, 1), new Vector2(1, 0) }; private int[] indices = { 0, 1, 3, 3, 1, 2 }; private string vertsrc = @"#version 450 core in vec3 position; in vec2 textureCoords; out vec2 pass_textureCoords; void main(void) { gl_Position = vec4(position, 1.0); pass_textureCoords = textureCoords; }"; private string fragsrc = @"#version 450 core in vec2 pass_textureCoords; out vec4 out_color; uniform sampler2D textureSampler; void main(void) { out_color = texture(textureSampler, pass_textureCoords); }"; private int programID; private int vertexShaderID; private int fragmentShaderID; private int textureID; private Bitmap texsrc; public Game() : base(WIDTH, HEIGHT, GraphicsMode.Default, "Tutorial 05 - Texturing", GameWindowFlags.Default, DisplayDevice.Default, 4, 5, GraphicsContextFlags.Default) { } protected override void OnLoad(EventArgs e) { base.OnLoad(e); CursorVisible = true; GL.GenVertexArrays(1, out vaoID); GL.BindVertexArray(vaoID); GL.GenBuffers(1, out vboID); GL.BindBuffer(BufferTarget.ArrayBuffer, vboID); GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(vertices.Length * Vector3.SizeInBytes), vertices, BufferUsageHint.StaticDraw); GL.GenBuffers(1, out iboID); GL.BindBuffer(BufferTarget.ElementArrayBuffer, iboID); GL.BufferData(BufferTarget.ElementArrayBuffer, (IntPtr)(indices.Length * sizeof(int)), indices, BufferUsageHint.StaticDraw); vertexShaderID = 
GL.CreateShader(ShaderType.VertexShader); GL.ShaderSource(vertexShaderID, vertsrc); GL.CompileShader(vertexShaderID); fragmentShaderID = GL.CreateShader(ShaderType.FragmentShader); GL.ShaderSource(fragmentShaderID, fragsrc); GL.CompileShader(fragmentShaderID); programID = GL.CreateProgram(); GL.AttachShader(programID, vertexShaderID); GL.AttachShader(programID, fragmentShaderID); GL.LinkProgram(programID); // Loading texture from embedded resource texsrc = new Bitmap(Assembly.GetEntryAssembly().GetManifestResourceStream("Tutorial_05.example.png")); textureID = GL.GenTexture(); GL.BindTexture(TextureTarget.Texture2D, textureID); GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear); GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear); GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, texsrc.Width, texsrc.Height, 0, PixelFormat.Bgra, PixelType.UnsignedByte, IntPtr.Zero); System.Drawing.Imaging.BitmapData bitmap_data = texsrc.LockBits(new Rectangle(0, 0, texsrc.Width, texsrc.Height), System.Drawing.Imaging.ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppRgb); GL.TexSubImage2D(TextureTarget.Texture2D, 0, 0, 0, texsrc.Width, texsrc.Height, PixelFormat.Bgra, PixelType.UnsignedByte, bitmap_data.Scan0); texsrc.UnlockBits(bitmap_data); GL.Enable(EnableCap.Texture2D); GL.BufferData(BufferTarget.TextureBuffer, (IntPtr)(texcoords.Length * Vector2.SizeInBytes), texcoords, BufferUsageHint.StaticDraw); GL.BindAttribLocation(programID, 0, "position"); GL.BindAttribLocation(programID, 1, "textureCoords"); } protected override void OnResize(EventArgs e) { base.OnResize(e); GL.Viewport(0, 0, ClientRectangle.Width, ClientRectangle.Height); } protected override void OnUpdateFrame(FrameEventArgs e) { base.OnUpdateFrame(e); keyState = Keyboard.GetState(); if (keyState.IsKeyDown(Key.Escape)) { Exit(); } } protected override void OnRenderFrame(FrameEventArgs e) { 
base.OnRenderFrame(e); // Prepare for background GL.Clear(ClearBufferMask.ColorBufferBit); GL.ClearColor(Color4.Red); // Draw traingles GL.EnableVertexAttribArray(0); GL.EnableVertexAttribArray(1); GL.BindVertexArray(vaoID); GL.UseProgram(programID); GL.BindBuffer(BufferTarget.ArrayBuffer, vboID); GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, 0, IntPtr.Zero); GL.ActiveTexture(TextureUnit.Texture0); GL.BindTexture(TextureTarget.Texture3D, textureID); GL.BindBuffer(BufferTarget.ElementArrayBuffer, iboID); GL.DrawElements(BeginMode.Triangles, indices.Length, DrawElementsType.UnsignedInt, 0); GL.DisableVertexAttribArray(0); GL.DisableVertexAttribArray(1); SwapBuffers(); } protected override void OnClosed(EventArgs e) { base.OnClosed(e); GL.DeleteVertexArray(vaoID); GL.DeleteBuffer(vboID); } } } I can not remember where do I add GL.Uniform2();
    • By Jens Eckervogt
      Hello everyone
      For @80bserver8 nice job - I have found Google search. How did you port from Javascript WebGL to C# OpenTK.?
      I have been searched Google but it shows f***ing Unity 3D. I really want know how do I understand I want start with OpenTK But I want know where is porting of Javascript and C#?
       
      Thanks!
  • Advertisement
  • Advertisement
Sign in to follow this  

OpenGL How does glFlush() work?

This topic is 2222 days old which is more than the 365 day threshold we allow for new replies. Please post a new topic.

If you intended to correct an error in the post then please contact us.

Recommended Posts

Hi everyone,
I am a newbie to OpenGL programming. I've started learning OpenGL very recently
can anyone please explain to me how the function glFlush works, in simple terms and with an example
that can help me understand it better?

Thankyou

Share this post


Link to post
Share on other sites
Advertisement
When you issue a drawcall (or any other opengl command) the implementation can keep it in a buffer instead of sending it to the driver (and the driver can buffer things before sending them to the GPU), this allows the implementation to merge drawcalls into batches to reduce context switches and other nice things.

The problem with this is that your drawcalls might not get sent immediately. Most of the time this is a good thing (buffering is good), but in some situations you might want the implementation to flush its buffers early.

The main usecase i can think of for this is if you have a render function that looks something like this:

1) bunch of opengl drawcalls
2) do some time consuming calculations or i/o
3) more drawcalls
4) more time consuming cpu stuff
4) swap the buffers

In this case it can be a good idea to flush the buffers before you do the time consuming calculations to get the GPU working on the drawcalls. (if you don't flush then the implementation might keep some or all commands buffered until you try to swap the buffers as it is expecting you to send more data its way).

Its important to note that the buffer swap will not only flush the buffers, it will also stall until all rendering is complete and thus it can be beneficial to have a game loop that does something like:

1) issue render calls
2) flush
3) update gamestate (This can take quite some time in complex games)
4) swap buffers

This way the GPU will process the drawcalls while you update the gamestate. (Some implementations will use triple buffering or pre-rendered frames to get a similar effect with a update->render->swap loop but you can't always count on that)

(With double buffering and no pre-rendering a update->render->swap loop will leave the GPU idle while you update the gamestate and the CPU idle while the GPU finishes rendering and swapping which is fairly inefficient)

Share this post


Link to post
Share on other sites
I don't think you'll ever need to use glFlush. When you call swap buffers, it automatically does a flush in any case, so if you're double buffering (as almost all use cases do these days), calling glFlush won't gain you anything. So in Simon's example, you can replace 4 steps with 3:

1) Issue render calls
2) Swap buffers
3) Update game state

, which is pretty much what you'd do if you didn't know glFlush existed.

Share this post


Link to post
Share on other sites

I don't think you'll ever need to use glFlush. When you call swap buffers, it automatically does a flush in any case, so if you're double buffering (as almost all use cases do these days), calling glFlush won't gain you anything. So in Simon's example, you can replace 4 steps with 3:

1) Issue render calls
2) Swap buffers
3) Update game state

, which is pretty much what you'd do if you didn't know glFlush existed.


The problem with a render->swap->update or update->render->swap loop (they are identical really) is that the swap waits for the render to finish completely, which in turn means that the GPU will be idle during the update (after the swap, all drawcalls will be completely processed). glFlush doesn't wait for the render to finish; it returns immediately, allowing you to send off all commands to the GPU without swapping the buffers. (This way you can squeeze in all non-rendering-related CPU work while the GPU is working, by putting it between a flush and the swap.)

Once you start pushing the limits of the hardware a bit it will pay off to have the GPU and CPU work at the same time instead of constantly waiting for eachother. (a multithreaded approach is even better these days but also significantly more complex and thus error prone)

Share this post


Link to post
Share on other sites
Thankyou all for replying,

but still i am unable to understand how GLFlush works,

is it mandatory to write GLFlush after all the drawing functions between glBegin and glEnd?

i've written a sample code please go through it

#include<GL/gl.h>
#include<GL/glu.h>
#include<GL/glut.h>
#include<stdio.h>
#include<stdlib.h>
void display1()
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0, 1.0, 1.0);
glBegin(GL_POLYGON);
glVertex3f(0.25,0.25,0.0);
glVertex3f(0.75,0.25,0.0);
glVertex3f(0.75,0.75,0.0);
glVertex3f(0.25,0.75,0.0);
glEnd();

glFlush();

}
void display2()
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0, 1.0, 0.0);
glBegin(GL_LINE);
glVertex3f(0.25,0.25,0.0);
glVertex3f(0.75,0.25,0.0);
glVertex3f(0.75,0.75,0.0);
glVertex3f(0.25,0.75,0.0);
glEnd();


glFlush();

}
void display3()
{
glColor3f(0.0,1.0,1.0);
glBegin(GL_TRIANGLES);
glVertex3f(0.25,0.25,0.0);
glVertex3f(0.75,0.25,0.0);
glVertex3f(0.75,0.75,0.0);
glEnd();
//glClear(GL_COLOR_BUFFER_BIT);
glFlush();
}
/* One-time GL state setup: black clear colour and an orthographic
 * projection mapping x,y in [0,1] (z in [-1,1]) onto the window. */
void init()
{
glClearColor(0.0,0.0,0.0,0.0); /* clear to opaque black */
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0,1.0,0.0,1.0,-1.0,1.0); /* unit-square viewing volume */
}
/* Creates a 500x500 single-buffered RGB window and enters the GLUT loop.
 *
 * BUG FIX: the original registered three display callbacks in a row
 * (display1, display2, display3).  GLUT keeps exactly ONE display
 * callback per window, so the first two calls were silently overwritten
 * and only display3 ever ran.  The dead registrations are removed;
 * behavior is unchanged.  Swap in display1 or display2 below to view
 * the other scenes.
 */
int main(int argc, char *argv[])
{
    glutInit(&argc, argv);
    /* GLUT_SINGLE: no back buffer and no swap, which is why every
     * display callback must end with glFlush() to see any output. */
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    glutInitWindowSize(500, 500);
    glutCreateWindow("My First OpenGL program");
    init();
    glutDisplayFunc(display3);
    glutMainLoop();
    return 0;
}


In the above code in 3 functions display1,2 and 3,
when i specify GLFlush then only i am able to see the object
or else i am unable to see anything why is it like that?

can you explain me in detail why GLFLUSH is being used here
with a small lucid example?

thanks in advance

Share this post


Link to post
Share on other sites
Commands are typically not executed immediately but stored in a command buffer. A flush is implied when you swap buffers in a double-buffered environment, which is why you don't need to flush it yourself. But in a single-buffered environment like your code, the commands wait in the command buffer until it is flushed, and only you can flush it, since there is no implied flush point anymore (swapping double buffers is an implied flush point). If you don't flush the buffer, the commands are still in the buffer waiting to be executed, so nothing is drawn.

Unless you're using single buffering, you should never have to use glFlush or glFinish.

Share this post


Link to post
Share on other sites
Thankyou Brother Bob for replying,

how long will the commands stay in command buffer?

why because even after the complete execution of program, will the commands still stay there?

please tell me

Share this post


Link to post
Share on other sites
Why are you so concerned with glFlush GG? It's honestly not something you should worry about or need to use until you spot an opportunity to optimise there. As has been pointed out, it will automatically be flushed when the buffers are swapped. Otherwise, how long it remains buffered depends on many things, including the driver implementation defined size of the buffers.

And no, I have no idea why flush is being used in that example, unless it isn't double buffering and needs to draw something directly to a front buffer, i.e. no swap buffers.

Share this post


Link to post
Share on other sites

Unless you're using single buffering, you should never have to use glFlush or glFinish.


Thankyou all for replying,

but still i am unable to understand how GLFlush works,

is it mandatory to write GLFlush after all the drawing functions between glBegin and glEnd?

i've written a sample code please go through it

#include<GL/gl.h>
#include<GL/glu.h>
#include<GL/glut.h>
#include<stdio.h>
#include<stdlib.h>
void display1()
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0, 1.0, 1.0);
glBegin(GL_POLYGON);
glVertex3f(0.25,0.25,0.0);
glVertex3f(0.75,0.25,0.0);
glVertex3f(0.75,0.75,0.0);
glVertex3f(0.25,0.75,0.0);
glEnd();

glFlush();

}
void display2()
{
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0, 1.0, 0.0);
glBegin(GL_LINE);
glVertex3f(0.25,0.25,0.0);
glVertex3f(0.75,0.25,0.0);
glVertex3f(0.75,0.75,0.0);
glVertex3f(0.25,0.75,0.0);
glEnd();


glFlush();

}
void display3()
{
glColor3f(0.0,1.0,1.0);
glBegin(GL_TRIANGLES);
glVertex3f(0.25,0.25,0.0);
glVertex3f(0.75,0.25,0.0);
glVertex3f(0.75,0.75,0.0);
glEnd();
//glClear(GL_COLOR_BUFFER_BIT);
glFlush();
}
void init()
{
glClearColor(0.0,0.0,0.0,0.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0,1.0,0.0,1.0,-1.0,1.0);
}
int main(int argc,char *argv[])
{
glutInit(&argc,argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutInitWindowSize(500 , 500);
glutCreateWindow("My First OpenGL program");
init();
glutDisplayFunc(display1);
glutDisplayFunc(display2);
glutDisplayFunc(display3);
glutMainLoop();
return 0;
}


In the above code in 3 functions display1,2 and 3,
when i specify GLFlush then only i am able to see the object
or else i am unable to see anything why is it like that?

can you explain me in detail why GLFLUSH is being used here
with a small lucid example?

thanks in advance


In this case its used because you are using a single buffer (and thus no buffer swap that forces a flush), if you switch
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
to
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
you no longer have to flush to get things on screen (as a glFinish is implied on the swap). (As i said above though, flushing can still be useful for performance reasons if you run a single threaded application)

Share this post


Link to post
Share on other sites
The commands stays in the buffer for as long as necessary. If there is nothing that flushes it, they may even stay there forever and you will never get your triangle on the screen.

Share this post


Link to post
Share on other sites
Sign in to follow this  

  • Advertisement