#1 StanLee

Posted 14 March 2013 - 03:18 PM

I am trying to do some raytracing on the GPU via a compute shader in OpenGL, and I have come across some very strange behaviour.

For every pixel on the screen I launch one compute shader invocation; this is what the compute shader looks like:

 

 

#version 430

struct Camera{
    vec4    pos, dir, up, xAxis ;
    float   focalLength;
    float   pW, pH;
};

struct Sphere{
    vec4    position;
    float   radius;
};

struct Ray{
    vec3    origin;
    vec3    dir;
};

uniform Camera      camera;
uniform uint        width;
uniform uint        height;

uniform image2D outputTexture;

float hitSphere(Ray r, Sphere s){
    
    float s_ov = dot(r.origin, r.dir);
    float s_mv = dot(s.position.xyz, r.dir);
    float s_mm = dot(s.position.xyz, s.position.xyz);
    float s_mo = dot(s.position.xyz, r.origin);
    float s_oo = dot(r.origin, r.origin);
    
    float d = s_ov*s_ov-2.0f*s_ov*s_mv+s_mv*s_mv-s_mm+2.0f*s_mo*s_oo+s.radius*s.radius;
    
    if(d < 0){
        return -1.0f;
    } else if(d == 0){
        return (s_mv-s_ov);
    } else {
        float t1 = 0, t2 = 0;
        t1 = s_mv-s_ov;
        
        t2 = (t1-sqrt(d));
        t1 = (t1+sqrt(d));
        
        return t1>t2? t2 : t1 ;
    }
}

Ray initRay(uint x, uint y, Camera cam){
    Ray ray;
    ray.origin = cam.pos.xyz;
    
    ray.dir = cam.dir.xyz * cam.focalLength + vec3(1, 0, 0)*( float(x-(width/2)))*cam.pW
                              + cam.up.xyz * (float(y-(height/2))*cam.pH);
                              
    ray.dir = normalize(ray.dir);
                              
    return ray;
}

layout (local_size_x = 16, local_size_y = 16, local_size_z = 1) in;
void main(){
    uint x = gl_GlobalInvocationID.x;
    uint y = gl_GlobalInvocationID.y;
    
    if(x < 1024 && y < 768){
        float t = 0.0f;

        Ray r = initRay(x, y, camera);
        
        Sphere sp ={vec4(0.0f, 0.0f, 20.0f, 0.0f), 2.0f};

        t = hitSphere(r, sp);
        
        if(t <= -0.001f){
            imageStore(outputTexture, ivec2(x, y), vec4(0.0, 0.0, 1.0, 1.0));
        } else {
            imageStore(outputTexture, ivec2(x, y), vec4(0.0, 1.0, 0.0, 1.0));
        }
        
    }
}
 

Rendering on the GPU yields the following broken image:

quarter.png

Rendering on the CPU with the same algorithm yields this image:

normal.png

I can't figure out the problem, since I just copied and pasted the hitSphere() and initRay() functions from my CPU implementation into the compute shader. At first I thought I hadn't dispatched enough work groups, but then the background wouldn't be blue either, so that can't be the case: with width = 1024 and height = 768 the call below launches 64 x 48 work groups of 16 x 16 invocations, which covers every pixel exactly once. This is how I dispatch my compute shader:

#define WORK_GROUP_SIZE 16
//width = 1024, height = 768
void OpenGLRaytracer::renderScene(int width, int height){
    glUseProgram(_progID);

    glDispatchCompute(width / WORK_GROUP_SIZE, height / WORK_GROUP_SIZE, 1);

    glMemoryBarrier(GL_TEXTURE_FETCH_BARRIER_BIT);
}
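For completeness, the output image and the size uniforms are bound roughly like this before dispatching (a simplified sketch: _texID, the GL_RGBA8 format and the hard-coded sizes just stand in for my actual texture handle and window size):

// One-time setup before renderScene() is called
glUseProgram(_progID);

// Bind the output texture to image unit 0 so imageStore() in the shader can write to it
glBindImageTexture(0, _texID, 0, GL_FALSE, 0, GL_WRITE_ONLY, GL_RGBA8);
glUniform1i(glGetUniformLocation(_progID, "outputTexture"), 0);

// Upload the framebuffer dimensions used by initRay()
glUniform1ui(glGetUniformLocation(_progID, "width"), 1024u);
glUniform1ui(glGetUniformLocation(_progID, "height"), 768u);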

Then I moved the sphere along the x axis to the right:

half1.png

Then along the y axis towards the top:

half2.png

And in both directions (right and top):

full.png

When I move the sphere far enough to the left and towards the bottom, it disappears completely. It seems that the calculations on the GPU only work correctly in one quarter of the image (the top-right quarter) and yield wrong results in the other three quarters.

 

I am totally clueless at the moment and don't even know how to start fixing this.

