OpenGL How to make my game run on all graphics cards


Recommended Posts

I've coded a small demo of a game I'm creating and gave it to some friends so they could test it. The problem? It only ran on one of their machines, and even there it had some minor graphical glitches.

The question: what methods are there to make sure an OpenGL game runs on lots of different hardware, without having to buy all kinds of graphics cards to test on?
It is very difficult to debug errors when they only happen on other people's hardware.

For example, I tested it on a very powerful PC with an ATI HD 5800, and while it ran without errors, the characters wouldn't appear, only their shadows and the terrain...

Another example: the game would crash on all Intel cards. After hours of testing, it seems that using display lists was the reason. Why, I don't know, but removing the display list code and leaving only the glBegin/glEnd commands "worked".
Well, it didn't actually work, but at least the game didn't crash; it only showed a black screen. I use shaders for everything, but they are all very simple. I even tested with one that just outputs red to gl_FragColor, and still nothing...
Yet that same PC is able to run HL2, so why?

Some details that may help: the game is coded in Delphi and uses OpenGL, with shaders for everything (no fixed-pipeline support) and display lists for everything (terrain + characters).

Also, all the GLSL shaders make heavy use of #define and #ifdef/#endif; could this be a problem on some drivers?

All features can be turned off (shadows, post-processing, etc.), and I did turn them off on the Intels, and it still crashed.
I turned the characters off: still a crash. Only when I turned the terrain off too did it work, but of course then there was nothing to render...

Does anyone have ideas on how to find these problems?
How do you debug this kind of problem on hardware you don't have?
Any tips about Intel + GLSL/display list problems?

[Edited by - Relfos on December 7, 2010 5:03:36 AM]

Use only GL 1.1 features.

HL2 uses Direct3D; you could also use that instead of GL.
Intel drivers are quite bad. Even the D3D driver sucks. Intel is great for surfing the web.

You can't make sure that it runs on all PCs, because some PCs out there (e.g. with OEM or MS drivers) don't even have OpenGL support.

That aside, you've taken a good first step: get it out to as many people as possible so that you cover a good baseline of hardware and OSs (and OS versions). The sooner you can do this, the sooner you'll start identifying any potential problems.

If your friends are willing to help out here, you can make small demo apps containing isolated portions of your main code, which can help with identifying specific areas that do or don't work.

A technique I've found very useful in the past is to code on a machine with an Intel graphics chip in it. It sounds perverse and counter-intuitive, but my reasoning is that if it works on that, it stands a better chance of working on anything else.

You could also give your friends (or ask them to download) a copy of glIntercept, then get them to send you the logs for a frame. This can help uncover a lot of other potential problems. If not, you can put a lot of glGetError calls throughout your code and write your own logs.
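
If you go the manual route, a logging helper along these lines can be sprinkled after suspect calls (a minimal C sketch; the function and log file names are only illustrative, not from the original post):

#include <stdio.h>
#include <windows.h>
#include <GL/gl.h>

/* Drain and log all pending GL errors; call this after suspect GL calls. */
static void CheckGLErrors(const char *label)
{
    GLenum err;
    while ((err = glGetError()) != GL_NO_ERROR)   /* glGetError queues errors, so loop */
    {
        FILE *log = fopen("gl_errors.log", "a");
        if (log)
        {
            fprintf(log, "GL error 0x%04X after %s\n", err, label);
            fclose(log);   /* close immediately so the entry survives a crash */
        }
    }
}

/* usage: glDrawElements(...); CheckGLErrors("terrain draw"); */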

If any of your friends are comfortable doing this, you can also give them the source code and ask them to run it in a debugger.

Getting to know the underlying hardware better is never a bad idea. IMO one serious weakness of OpenGL is that it's a bit too high-level; it abstracts the hardware a bit too much. What this means is that it's often possible to write really badly incorrect code and still have it run, at least on your development machine. And when weird things or crashes start happening, it can be quite difficult to pin down the exact cause.

Finally take a good look through your code and check all the GL extensions you're using. Make sure that you're checking the return values for your wglGetProcAddress (or equivalent) calls, that you're also checking the extension string, and that you're not making any assumptions about extensions being available.
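
For example, a cautious extension check might look something like this (a C sketch for Windows; the names are illustrative and this is not the poster's actual code):

#include <string.h>
#include <windows.h>
#include <GL/gl.h>

typedef void (APIENTRY *ACTIVETEXTUREPROC)(GLenum texture);
static ACTIVETEXTUREPROC pglActiveTexture = NULL;

/* Simplistic substring test against the extension string. */
static int HasExtension(const char *name)
{
    const char *ext = (const char *)glGetString(GL_EXTENSIONS);
    return ext != NULL && strstr(ext, name) != NULL;
}

static int LoadMultitexture(void)
{
    if (!HasExtension("GL_ARB_multitexture"))      /* check the extension string first */
        return 0;
    pglActiveTexture = (ACTIVETEXTUREPROC)wglGetProcAddress("glActiveTextureARB");
    return pglActiveTexture != NULL;               /* never assume the pointer is valid */
}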

We're using the OpenGL API with the latest features, the ones that correspond to D3D10/D3D11 - and of course they're only available on D3D10/D3D11-class GPUs, so we say that our technology targets D3D10/D3D11 GPUs. (We're using OpenGL 3.2/3.3/4.0.)

What am I trying to say?
Decide what your target specification is (GeForce 8 series/AMD Radeon HD 2 series ... GeForce GTX 200 series, AMD Radeon HD 5 series ... etc.) and develop for it. You can't make your game run on all GPUs unless you stick to the very old OpenGL 1.1 spec (and you'd rather not, because some current drivers can have problems even with it, since they're built around the newer specs).
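
As a rough illustration of enforcing that baseline at startup (a C sketch; the minimum version here is only an example target):

#include <stdio.h>
#include <stdlib.h>
#include <windows.h>
#include <GL/gl.h>

/* Call once a GL context exists; refuses to run below the chosen baseline. */
static void RequireGLVersion(int wantMajor, int wantMinor)
{
    int major = 0, minor = 0;
    const char *ver = (const char *)glGetString(GL_VERSION);   /* e.g. "3.3.0 ..." */
    if (ver == NULL || sscanf(ver, "%d.%d", &major, &minor) != 2 ||
        major < wantMajor || (major == wantMajor && minor < wantMinor))
    {
        fprintf(stderr, "This game needs OpenGL %d.%d or newer (found: %s)\n",
                wantMajor, wantMinor, ver ? ver : "none");
        exit(1);
    }
}

/* usage: RequireGLVersion(3, 2); */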

Yes, I know about that; sorry, maybe I didn't explain myself correctly. My target hardware is anything that supports GLSL (which I think means OpenGL 1.5, if I'm not mistaken).
All the hardware I tested meets those requirements, yet only one machine actually ran the game; the others just crashed or showed a black screen.
I'm probably doing something with OpenGL that is invalid but that my own graphics card accepts.

Today I found that the Intel driver crashed because of my shaders, which are full of #defines, #ifdefs, #undefs, etc. So it was not my problem after all, but faulty drivers... I now pre-process the shader code myself, and that seems to work; however, instead of crashing, it now shows a black screen. Why am I insisting on running on this Intel? Well, its owner, a friend of mine, is also a coder who uses OpenGL and shaders, and his programs work on that laptop.

mhagain gave lots of useful advice, thanks!

"Make sure that you're checking the return values for your wglGetProcAddress (or equivalent) calls, that you're also checking the extension string, and that you're not making any assumptions about extensions being available."
Yes, my code is very strict about that: if any required extension is not found, it reports it to a log file and shows an error to the user. Also, I'm using a custom OpenGL DLL loader that, when a function is not found, tries the ARB version, and if that still fails, the EXT version.
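
The ARB/EXT fallback idea could look roughly like this in C (a sketch only, since the actual loader is written in Delphi):

#include <stdio.h>
#include <windows.h>

/* Try the core name first, then the ARB-suffixed name, then the EXT-suffixed name. */
static PROC LoadGLFunction(const char *name)
{
    char buf[128];
    PROC p = wglGetProcAddress(name);
    if (p == NULL)
    {
        snprintf(buf, sizeof(buf), "%sARB", name);
        p = wglGetProcAddress(buf);
    }
    if (p == NULL)
    {
        snprintf(buf, sizeof(buf), "%sEXT", name);
        p = wglGetProcAddress(buf);
    }
    if (p == NULL)
        fprintf(stderr, "Missing GL function: %s\n", name);   /* report to the log file in practice */
    return p;
}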

Are you using an NVidia or ATI video card for development? NVidia cards are known to be more lax about GLSL syntax, whereas ATI requires correct syntax. I would suggest buying at least one video card from every major video chip manufacturer you want to support (at least NVidia & ATI), and avoiding high-end cards when you want to support mid-range products.

Then log everything about the video card at startup (including all extensions). You should have some kind of debug build which frequently checks for GL errors and logs them. But be careful: checking for GL errors can be a performance killer and should be used sparingly in a release build. (Note: flush the log file after each entry, otherwise important log entries could be lost to internal buffering when your program crashes.)
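
A minimal sketch of that startup logging in C (the names are illustrative):

#include <stdio.h>
#include <windows.h>
#include <GL/gl.h>

/* Log basic GPU/driver info once a context exists; flush after every entry. */
static void LogVideoCardInfo(FILE *log)
{
    fprintf(log, "Vendor:     %s\n", (const char *)glGetString(GL_VENDOR));
    fflush(log);
    fprintf(log, "Renderer:   %s\n", (const char *)glGetString(GL_RENDERER));
    fflush(log);
    fprintf(log, "Version:    %s\n", (const char *)glGetString(GL_VERSION));
    fflush(log);
    fprintf(log, "Extensions: %s\n", (const char *)glGetString(GL_EXTENSIONS));
    fflush(log);   /* flush so the entries survive a later crash */
}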

Quote:
Original post by Ashaman73
Are you using an NVidia or ATI video card for development? NVidia cards are known to be more lax about GLSL syntax, whereas ATI requires correct syntax.

I am using an ATI card; I bought it specifically because I knew that NVidia is less strict about the syntax. However, it seems like buying a cheap Intel would be even better...

Quote:
Original post by Ashaman73
Then log everything about the video card at the beginning (including all extensions).

This was already done; I used it to confirm that all the cards tested meet all the requirements of the game.

Quote:
Original post by Ashaman73
You should use some kind of debug build which checks frequently gl errors and logs them.

OK, I am doing this now: a debug build that calls glGetError after each OpenGL call and halts the game whenever an error is found, dumping the call stack, line info, etc. to the log. Hopefully this will help find the problems!
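
Something along these lines, assuming a C-style debug build (only a sketch to show the idea, since the game itself is in Delphi):

#include <stdio.h>
#include <stdlib.h>
#include <windows.h>
#include <GL/gl.h>

/* Wrap every GL call in the debug build; report file/line and halt on the first error. */
#define GL_CHECK(call)                                                  \
    do {                                                                \
        call;                                                           \
        GLenum err_ = glGetError();                                     \
        if (err_ != GL_NO_ERROR) {                                      \
            fprintf(stderr, "GL error 0x%04X at %s:%d (%s)\n",          \
                    err_, __FILE__, __LINE__, #call);                   \
            abort();                                                    \
        }                                                               \
    } while (0)

/* usage: GL_CHECK(glDrawElements(GL_TRIANGLES, count, GL_UNSIGNED_SHORT, indices)); */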


A GLSL problem I found is typecasting a mat4 to a mat3, something like:

mat4 modelMatrix;
mat3 normalMatrix = mat3(modelMatrix);

Is this valid GLSL?
Both ATI and NVidia accept it, but Intel doesn't, even though it seems like something that should be allowed; it's pretty basic...

A few words of warning that might make your life easier:
- Intel is very picky about loops and arrays in GLSL. I recently had to unroll my lighting loop by hand, because it would mis-compile on Intel 4500 drivers.

- Intel drivers do not support vertex texture fetch. This sucks.

- arrays-of-structures are treated differently between Intel, Ati and Nvidia. It's nigh impossible to make them work reliably, so avoid them if at all possible! (Plain structures should be fine).

- 1-length arrays may render incorrectly on Nvidia cards. Intel cards will downgrade them to non-array datatypes, which is a nasty surprise when binding uniforms by name.

- Ati and Nvidia cards handle conditional texture sampling differently. Nvidia is stricter here: you might notice seams on texture boundaries on Nvidia unless you use textureGrad (see the sketch after this list).

- Ati drivers may fail to compile some specific loop constructs. Sometimes shuffling code around will get them to work.

- Ati drivers have historically had problems when copying/blitting depth and stencil textures. YMMV, but I haven't managed to get depth blits to work, ever.
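
For the conditional-sampling point above, the usual workaround is to compute the gradients before the branch and sample with textureGrad; a rough GLSL sketch (the sampler and variable names are made up):

#version 130
uniform sampler2D baseMap;
uniform sampler2D detailMap;
in vec2 uv;
out vec4 fragColor;

void main()
{
    vec2 dx = dFdx(uv);   // gradients computed outside the divergent branch
    vec2 dy = dFdy(uv);
    if (uv.x > 0.5)
        fragColor = textureGrad(detailMap, uv, dx, dy);
    else
        fragColor = textureGrad(baseMap, uv, dx, dy);
}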

Quote:
Original post by Relfos
A GLSL problem I found is typecasting a mat4 to a mat3, something like:

mat4 modelMatrix;
mat3 normalMatrix = mat3(modelMatrix);

Is this valid GLSL?
Both ATI and NVidia accept it, but Intel doesn't, even though it seems like something that should be allowed; it's pretty basic...


That's not valid with GLSL 1.10
You need GLSL 1.20
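
For reference (not from the original reply): adding "#version 120" at the top of the shader enables the mat4-to-mat3 constructor, and on 1.10-level compilers you can build the mat3 from the matrix columns by hand:

mat4 modelMatrix;
mat3 normalMatrix = mat3(modelMatrix[0].xyz,
                         modelMatrix[1].xyz,
                         modelMatrix[2].xyz);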

Quote:
Original post by V-man
That's not valid with GLSL 1.10
You need GLSL 1.20

I see, that explains it, thank you!
And thanks, Fiddler, that list is very useful for knowing the problems with all three vendors.
Also, after some hours of intense debugging, I managed to make my game run on all my friends' PCs; it's all OK now.
