• Advertisement
  • Popular Tags

  • Popular Now

  • Advertisement
  • Similar Content

    • By codelyoko373
      I wasn't sure if this would be the right place for a topic like this so sorry if it isn't.
      I'm currently working on a project for Uni using FreeGLUT to make a simple solar system simulation. I've got to the point where I've implemented all the planets and have used a Scene Graph to link them all together. The issue I'm having now, though, is getting the planets and moons to orbit correctly at their own orbit speeds.
      I'm not really experienced with using matrices for stuff like this, so it's likely why I can't figure out how exactly to get it working. This is where I'm applying the transformation matrices, as well as pushing and popping them. This is within the Render function that every planet, including the sun and moons, will have and run.
      if (tag != "Sun") { glRotatef(orbitAngle, orbitRotation.X, orbitRotation.Y, orbitRotation.Z); } glPushMatrix(); glTranslatef(position.X, position.Y, position.Z); glRotatef(rotationAngle, rotation.X, rotation.Y, rotation.Z); glScalef(scale.X, scale.Y, scale.Z); glDrawElements(GL_TRIANGLES, mesh->indiceCount, GL_UNSIGNED_SHORT, mesh->indices); if (tag != "Sun") { glPopMatrix(); } The "If(tag != "Sun")" parts are my attempts are getting the planets to orbit correctly though it likely isn't the way I'm meant to be doing it. So I was wondering if someone would be able to help me? As I really don't have an idea on what I would do to get it working. Using the if statement is truthfully the closest I've got to it working but there are still weird effects like the planets orbiting faster then they should depending on the number of planets actually be updated/rendered.
    • By Jens Eckervogt
      Hello everyone, 
      I have problem with texture
      using System; using OpenTK; using OpenTK.Input; using OpenTK.Graphics; using OpenTK.Graphics.OpenGL4; using System.Drawing; using System.Reflection; namespace Tutorial_05 { class Game : GameWindow { private static int WIDTH = 1200; private static int HEIGHT = 720; private static KeyboardState keyState; private int vaoID; private int vboID; private int iboID; private Vector3[] vertices = { new Vector3(-0.5f, 0.5f, 0.0f), // V0 new Vector3(-0.5f, -0.5f, 0.0f), // V1 new Vector3(0.5f, -0.5f, 0.0f), // V2 new Vector3(0.5f, 0.5f, 0.0f) // V3 }; private Vector2[] texcoords = { new Vector2(0, 0), new Vector2(0, 1), new Vector2(1, 1), new Vector2(1, 0) }; private int[] indices = { 0, 1, 3, 3, 1, 2 }; private string vertsrc = @"#version 450 core in vec3 position; in vec2 textureCoords; out vec2 pass_textureCoords; void main(void) { gl_Position = vec4(position, 1.0); pass_textureCoords = textureCoords; }"; private string fragsrc = @"#version 450 core in vec2 pass_textureCoords; out vec4 out_color; uniform sampler2D textureSampler; void main(void) { out_color = texture(textureSampler, pass_textureCoords); }"; private int programID; private int vertexShaderID; private int fragmentShaderID; private int textureID; private Bitmap texsrc; public Game() : base(WIDTH, HEIGHT, GraphicsMode.Default, "Tutorial 05 - Texturing", GameWindowFlags.Default, DisplayDevice.Default, 4, 5, GraphicsContextFlags.Default) { } protected override void OnLoad(EventArgs e) { base.OnLoad(e); CursorVisible = true; GL.GenVertexArrays(1, out vaoID); GL.BindVertexArray(vaoID); GL.GenBuffers(1, out vboID); GL.BindBuffer(BufferTarget.ArrayBuffer, vboID); GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(vertices.Length * Vector3.SizeInBytes), vertices, BufferUsageHint.StaticDraw); GL.GenBuffers(1, out iboID); GL.BindBuffer(BufferTarget.ElementArrayBuffer, iboID); GL.BufferData(BufferTarget.ElementArrayBuffer, (IntPtr)(indices.Length * sizeof(int)), indices, BufferUsageHint.StaticDraw); vertexShaderID = 
GL.CreateShader(ShaderType.VertexShader); GL.ShaderSource(vertexShaderID, vertsrc); GL.CompileShader(vertexShaderID); fragmentShaderID = GL.CreateShader(ShaderType.FragmentShader); GL.ShaderSource(fragmentShaderID, fragsrc); GL.CompileShader(fragmentShaderID); programID = GL.CreateProgram(); GL.AttachShader(programID, vertexShaderID); GL.AttachShader(programID, fragmentShaderID); GL.LinkProgram(programID); // Loading texture from embedded resource texsrc = new Bitmap(Assembly.GetEntryAssembly().GetManifestResourceStream("Tutorial_05.example.png")); textureID = GL.GenTexture(); GL.BindTexture(TextureTarget.Texture2D, textureID); GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear); GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear); GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, texsrc.Width, texsrc.Height, 0, PixelFormat.Bgra, PixelType.UnsignedByte, IntPtr.Zero); System.Drawing.Imaging.BitmapData bitmap_data = texsrc.LockBits(new Rectangle(0, 0, texsrc.Width, texsrc.Height), System.Drawing.Imaging.ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppRgb); GL.TexSubImage2D(TextureTarget.Texture2D, 0, 0, 0, texsrc.Width, texsrc.Height, PixelFormat.Bgra, PixelType.UnsignedByte, bitmap_data.Scan0); texsrc.UnlockBits(bitmap_data); GL.Enable(EnableCap.Texture2D); GL.BufferData(BufferTarget.TextureBuffer, (IntPtr)(texcoords.Length * Vector2.SizeInBytes), texcoords, BufferUsageHint.StaticDraw); GL.BindAttribLocation(programID, 0, "position"); GL.BindAttribLocation(programID, 1, "textureCoords"); } protected override void OnResize(EventArgs e) { base.OnResize(e); GL.Viewport(0, 0, ClientRectangle.Width, ClientRectangle.Height); } protected override void OnUpdateFrame(FrameEventArgs e) { base.OnUpdateFrame(e); keyState = Keyboard.GetState(); if (keyState.IsKeyDown(Key.Escape)) { Exit(); } } protected override void OnRenderFrame(FrameEventArgs e) { 
base.OnRenderFrame(e); // Prepare for background GL.Clear(ClearBufferMask.ColorBufferBit); GL.ClearColor(Color4.Red); // Draw traingles GL.EnableVertexAttribArray(0); GL.EnableVertexAttribArray(1); GL.BindVertexArray(vaoID); GL.UseProgram(programID); GL.BindBuffer(BufferTarget.ArrayBuffer, vboID); GL.VertexAttribPointer(0, 3, VertexAttribPointerType.Float, false, 0, IntPtr.Zero); GL.ActiveTexture(TextureUnit.Texture0); GL.BindTexture(TextureTarget.Texture3D, textureID); GL.BindBuffer(BufferTarget.ElementArrayBuffer, iboID); GL.DrawElements(BeginMode.Triangles, indices.Length, DrawElementsType.UnsignedInt, 0); GL.DisableVertexAttribArray(0); GL.DisableVertexAttribArray(1); SwapBuffers(); } protected override void OnClosed(EventArgs e) { base.OnClosed(e); GL.DeleteVertexArray(vaoID); GL.DeleteBuffer(vboID); } } } I can not remember where do I add GL.Uniform2();
    • By Jens Eckervogt
      Hello everyone
      For @80bserver8, nice job - I found it via a Google search. How did you port from JavaScript WebGL to C# OpenTK?
      I have searched Google but it shows f***ing Unity 3D. I really want to get started with OpenTK, but where can I find information on porting from JavaScript to C#?
       
      Thanks!
    • By mike44
      Hi
      I draw in a OpenGL framebuffer. All is fine but it eats FPS (frames per second), hence I wonder if I could execute the framebuffer drawing only every 5-10th loop or so?
      Many thanks
       
    • By cebugdev
      hi all,
      how to implement this type of effect ? 
      Also, what is this effect called? Is this considered volumetric lighting?
      what are the options of doing this? 
      a. A billboard? But I want this to have the 3D effect so that when we rotate the camera we can still have that 3D feel.
      b. a transparent 3d mesh? and we can animate it as well?
      need your expert advise.
       
      additional:
      2. how to implement things like fireball projectile (shot from a monster) (billboard texture or a 3d mesh)?
       
      Note: im using OpenGL ES 2.0 on mobile. 
       
      thanks!
  • Advertisement
  • Advertisement
Sign in to follow this  

OpenGL Depth texture + FBO = problem...

This topic is 4027 days old which is more than the 365 day threshold we allow for new replies. Please post a new topic.

If you intended to correct an error in the post then please contact us.

Recommended Posts

Hi, I'm trying to attach a 16-bit depth texture to an FBO, but I keep getting GL_FRAMEBUFFER_UNSUPPORTED_EXT for the FBO status. The depth texture is created with the following parameters: internalformat = GL_DEPTH_COMPONENT16 format = GL_DEPTH_COMPONENT type = GL_UNSIGNED_INT There's another texture already attached as the first colour attachment, which has the following parameters: internalformat = GL_RGBA8 format = GL_RGBA type = GL_UNSIGNED_BYTE Neither of the textures cause any OpenGL errors when they're created. Am I doing something silly like using the wrong type, or is this something deeper? I'd like to not have to hack my way round this problem, so if anyone can shed any light on it, I'll be very thankful... PS. My card is an NVIDIA Geforce 7800, with the latest 93.71 drivers.

Share this post


Link to post
Share on other sites
Advertisement
A little more on this. I dug out a program I wrote last year which used FBOs. I wrote the code below, creating a 24-bit depth render buffer, and it works just fine. Is it just perhaps that a depth texture is unacceptable?

glGenFramebuffersEXT(1, &lFbo);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, lFbo);

glGenRenderbuffersEXT(1, &lRb);
glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, lRb);

glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT24, 1024, 1024);
glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, lRb);

Share this post


Link to post
Share on other sites
I think it's because nvidia only allows the same depth as the main frame buffer at the moment, be careful though as ATi cards unless they added support for 24bit, only work with 16bit depth in FBOs to make it extra confusing.

Share this post


Link to post
Share on other sites
You know you don't need a renderbuffer for a depth map?

What about GL_CLAMP_TO_EDGE for textures? I thought I remembered that FBOs in some cases need that set... but I could be wrong. I use GL_FLOAT for my types, but I don't think that matters either.

I would ditch the RB if you don't need it. I know that 16bit works cause I have used it myself with a RB before.

Share this post


Link to post
Share on other sites
Quote:
Original post by BiGCyC
I think it's because nvidia only allows the same depth as the main frame buffer at the moment, be careful though as ATi cards unless they added support for 24bit, only work with 16bit depth in FBOs to make it extra confusing.


Recent drivers seem to support the depth_stencil extension so in theory at least 24bit depth is supported on ATI cards now, along with 8bit stencil...

(In practice, I've not tried it yet and I don't know of anyone who has... maybe tomorrow/later today.)

Share this post


Link to post
Share on other sites
Quote:
Original post by MARS_999
You know you don't need a renderbuffer for a depth map?

What about GL_CLAMP_TO_EDGE for textures? I thought I remember FBO's in some cases need that set... But I could be wrong. I use GL_FLOAT for my types, but don't think that matters either.

I would ditch the RB if you don't need it. I know that 16bit works cause I have used it myself with a RB before.

Yeah, I know the render buffer works. The point I was making with my second post was that while the render buffer works, a depth texture doesn't, which is puzzling (not to mention deeply annoying).

GL_CLAMP_TO_EDGE is set when I use the texture by CgFX, but I can't see why it should affect rendering to the texture. I changed to using GL_FLOAT, which made no difference, unfortunately.

Quote:
Original post by BiGCyC
I think it's because nvidia only allows the same depth as the main frame buffer at the moment, be careful though as ATi cards unless they added support for 24bit, only work with 16bit depth in FBOs to make it extra confusing.

Not sure what bit-depth of depth buffer I'm using; the system is running through a GLUT testbed application, so I guess it could be anything...the program that uses the render buffer actually acquires its RC through Win32, so I know for a fact it's using a 32-bit depth there...perhaps I'll move everything over to Win32 (which is planned anyway, shouldn't take long), and see if that makes any difference...

Share this post


Link to post
Share on other sites
Quote:
Original post by iNsAn1tY
Quote:
Original post by BiGCyC
I think it's because nvidia only allows the same depth as the main frame buffer at the moment, be careful though as ATi cards unless they added support for 24bit, only work with 16bit depth in FBOs to make it extra confusing.

Not sure what bit-depth of depth buffer I'm using; the system is running through a GLUT testbed application, so I guess it could be anything...the program that uses the render buffer actually acquires its RC through Win32, so I know for a fact it's using a 32-bit depth there...perhaps I'll move everything over to Win32 (which is planned anyway, shouldn't take long), and see if that makes any difference...

Turns out it was this. I switched over to using Win32 with a 32-bit depth buffer, and it seems to be working now. Thanks for the replies.

Share this post


Link to post
Share on other sites
Quote:
Original post by iNsAn1tY
Quote:
Original post by iNsAn1tY
Quote:
Original post by BiGCyC
I think it's because nvidia only allows the same depth as the main frame buffer at the moment, be careful though as ATi cards unless they added support for 24bit, only work with 16bit depth in FBOs to make it extra confusing.

Not sure what bit-depth of depth buffer I'm using; the system is running through a GLUT testbed application, so I guess it could be anything...the program that uses the render buffer actually acquires its RC through Win32, so I know for a fact it's using a 32-bit depth there...perhaps I'll move everything over to Win32 (which is planned anyway, shouldn't take long), and see if that makes any difference...

Turns out it was this. I switched over to using Win32 with a 32-bit depth buffer, and it seems to be working now. Thanks for the replies.

Oh. It wasn't that. A bug was causing a different FBO problem, and when I resolved that, this one came back.

If you're rendering to a depth texture (not a render buffer, an actual texture) using an FBO on similar NVIDIA hardware (Geforce 7800 GTX), could you post here?

Share this post


Link to post
Share on other sites
Hi, I'm messing with shadow mapping just now so have some code handy if it helps.

As you can see, I just use GL_DEPTH_COMPONENT as the texture format, so I take it that the current depth buffer setting is used (mine is 24-bit).

The code shows the essentials for creating/freeing everything and how to bind the FBO for rendering. You probably don't need all of this, but I hope some of it helps you, and others...


//-------------------------------------
//create the depth texture - only once
//-------------------------------------
glEnable(GL_TEXTURE_2D);
glGenTextures(1, @DepthTexture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glCopyTexImage2d(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, 0, 0, aWidth, aHeight, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);

//-------------------------------------
//init fbo with depth texture - only once
//-------------------------------------
glGenFramebuffersEXT(1, @fbo);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT,
GL_DEPTH_ATTACHMENT_EXT, GL_TEXTURE_2D, DepthTexture, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);

//-------------------------------------
//bind the fbo for rendering - every frame
//-------------------------------------
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo);
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);

//-------------------------------------
//set up viewport etc. and Draw stuff - every frame
//-------------------------------------
Dorender();

//-------------------------------------
//unbind the fbo - every frame
//-------------------------------------
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);

//-------------------------------------
//delete the fbo and depth texture - only once
//-------------------------------------
glDeleteFramebuffersEXT(1, @fbo);
glDeleteTextures(1, @DepthTexture);

Share this post


Link to post
Share on other sites
Hey Fig, you don't need to do
glCopyTexImage2d when you setup a FBO or use them... Just thought I would point that out.

Share this post


Link to post
Share on other sites
Sign in to follow this  

  • Advertisement