My short-term goal was to create a 100% reflective sphere more or less in the center of the scene and a "full bright" opaque colored plane below it to demonstrate the sphere reflecting the rays.
For my test case at the moment I am only rendering an opaque sphere.
Unfortunately, for some reason whenever I change the sphere's origin in the scene instead of simply translating to that position it seems to stretch across whatever axis I'm moving it on.
At first I pondered whether my ray normals weren't being generated right but it looks correct to me.
void generateRays(){ // Generate rays for(uint32 i(0); i < width; ++i) { for(uint32 j(0); j < height; ++j) { Vec4<float> p((float)i, (float)j, 0.0f); Vec4<float> d = p - origin; pRays[j].setDone(false); d.Normalize(); pRays[j].setOrigin(origin); pRays[j].setDirection(d); } }}
My next thought was maybe it's the sphere-ray intersection... again I poured over this for several hours but nothing sticks out.
bool testRay(jb_ray* ray){ Vec4<float> dir = ray->getDirection(); Vec4<float> org = ray->getOrigin(); // quadratic equation. float a = dir * dir; dir *= 2; Vec4<float> omo = org - origin; float b = dir * omo; float c = omo*omo - radius*radius; float quad = b*b-4*a*c; float time0 = -1.0f; float time1 = -1.0f; if(quad > 0) { time0 = (-b + sqrt(quad)) / (2*a); time1 = (-b - sqrt(quad)) / (2*a); } else if(quad == 0) time0 = (-b / (2*a)); else { return false; } Vec4<float> t0Origin ( (ray->getOrigin().x + ray->getDirection().x) * time0, (ray->getOrigin().y + ray->getDirection().y) * time0, (ray->getOrigin().z + ray->getDirection().z) * time0 ); ray->setOrigin(t0Origin); // Calculate new ray normal. Vec4<float> t0Normal(t0Origin); t0Normal -= origin; float invRadius = 1.0f/radius; t0Normal *= invRadius; t0Normal.Normalize(); ray->setDirection(t0Normal); ray->setColor(diffuse); ray->incReflections(); // Lets ignore secondary intersections... for now.. // In this case it would be the point the // ray leaves the sphere. /* Vec4<float> t1Normal (ray->getOrigin().x + ray->getDirection().x, ray->getOrigin().y + ray->getDirection().y, ray->getOrigin().z + ray->getDirection().z); t1Normal *= t1; t1Normal - origin; t1Normal *= iRad; t1Normal.Normalize(); */ return true;}
My render routine is pretty straight forward (though inefficient), generate the rays, and trace them by checking if there's an intersection between every object in the scene and set the pixel color to corresponding the ray's resulting color.
void traceRays(){ for(uint32 i(0); i < width; ++i) { for(uint32 j(0); j < height; ++j) { jb_ray* ray = &pRays[j]; bool intersected = true; while(intersected) { if(ray->isDone()) break; if(ray->numReflections() > MAX_RAY_REFLECTIONS) break; if(shapes.size() == 0) break; for(uint32 k(0); k < shapes.size(); ++k) { intersected = shapes[k]->testRay(ray); if(intersected) if(shapes[k]->isOpaque()) break; } } } }}void Render(){ glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glMatrixMode(GL_PROJECTION); glLoadIdentity();// glViewport(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT); // We'll just ignore resizing for now... glMatrixMode(GL_PROJECTION); glLoadIdentity(); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); rayGen.generateRays(); rayGen.traceRays(); for(uint32 i(0); i < SCREEN_WIDTH; ++i) { for(uint32 j(0); j < SCREEN_HEIGHT; ++j) { jb_ray* ray = rayGen.getRay(i, j); pixels[j*SCREEN_WIDTH + i] = ray->getColor(); } } glDrawPixels(SCREEN_WIDTH, SCREEN_HEIGHT, GL_RGBA, GL_UNSIGNED_BYTE, pixels); glutSwapBuffers();}
Any help would be greatly appreciated :) Thank you!