Jump to content
  • Advertisement

congard

Member
  • Content Count

    3
  • Joined

  • Last visited

Community Reputation

0 Neutral

About congard

  • Rank
    Newbie

Personal Information

  • Interests
    Education
    Programming
  1. Solved: https://stackoverflow.com/questions/53145377/incorrect-tracing-with-sslr-screen-space-local-reflections
  2. Hello! I tried to implement the Morgan's McGuire method, but my attempts failed. He described his method here: Screen Space Ray Tracing. Below is my code and screenshot. SSLR fragment shader: #version 330 core uniform sampler2D normalMap; // in view space uniform sampler2D depthMap; // in view space uniform sampler2D colorMap; uniform sampler2D reflectionStrengthMap; uniform mat4 projection; uniform mat4 inv_projection; in vec2 texCoord; layout (location = 0) out vec4 fragColor; vec3 calcViewPosition(in vec2 texCoord) { // Combine UV & depth into XY & Z (NDC) vec3 rawPosition = vec3(texCoord, texture(depthMap, texCoord).r); // Convert from (0, 1) range to (-1, 1) vec4 ScreenSpacePosition = vec4(rawPosition * 2 - 1, 1); // Undo Perspective transformation to bring into view space vec4 ViewPosition = inv_projection * ScreenSpacePosition; ViewPosition.y *= -1; // Perform perspective divide and return return ViewPosition.xyz / ViewPosition.w; } // By Morgan McGuire and Michael Mara at Williams College 2014 // Released as open source under the BSD 2-Clause License // http://opensource.org/licenses/BSD-2-Clause #define point2 vec2 #define point3 vec3 float distanceSquared(vec2 a, vec2 b) { a -= b; return dot(a, a); } // Returns true if the ray hit something bool traceScreenSpaceRay( // Camera-space ray origin, which must be within the view volume point3 csOrig, // Unit length camera-space ray direction vec3 csDir, // A projection matrix that maps to pixel coordinates (not [-1, +1] // normalized device coordinates) mat4x4 proj, // The camera-space Z buffer (all negative values) sampler2D csZBuffer, // Dimensions of csZBuffer vec2 csZBufferSize, // Camera space thickness to ascribe to each pixel in the depth buffer float zThickness, // (Negative number) float nearPlaneZ, // Step in horizontal or vertical pixels between samples. 
This is a float // because integer math is slow on GPUs, but should be set to an integer >= 1 float stride, // Number between 0 and 1 for how far to bump the ray in stride units // to conceal banding artifacts float jitter, // Maximum number of iterations. Higher gives better images but may be slow const float maxSteps, // Maximum camera-space distance to trace before returning a miss float maxDistance, // Pixel coordinates of the first intersection with the scene out point2 hitPixel, // Camera space location of the ray hit out point3 hitPoint) { // Clip to the near plane float rayLength = ((csOrig.z + csDir.z * maxDistance) > nearPlaneZ) ? (nearPlaneZ - csOrig.z) / csDir.z : maxDistance; point3 csEndPoint = csOrig + csDir * rayLength; // Project into homogeneous clip space vec4 H0 = proj * vec4(csOrig, 1.0); vec4 H1 = proj * vec4(csEndPoint, 1.0); float k0 = 1.0 / H0.w, k1 = 1.0 / H1.w; // The interpolated homogeneous version of the camera-space points point3 Q0 = csOrig * k0, Q1 = csEndPoint * k1; // Screen-space endpoints point2 P0 = H0.xy * k0, P1 = H1.xy * k1; // If the line is degenerate, make it cover at least one pixel // to avoid handling zero-pixel extent as a special case later P1 += vec2((distanceSquared(P0, P1) < 0.0001) ? 
0.01 : 0.0); vec2 delta = P1 - P0; // Permute so that the primary iteration is in x to collapse // all quadrant-specific DDA cases later bool permute = false; if (abs(delta.x) < abs(delta.y)) { // This is a more-vertical line permute = true; delta = delta.yx; P0 = P0.yx; P1 = P1.yx; } float stepDir = sign(delta.x); float invdx = stepDir / delta.x; // Track the derivatives of Q and k vec3 dQ = (Q1 - Q0) * invdx; float dk = (k1 - k0) * invdx; vec2 dP = vec2(stepDir, delta.y * invdx); // Scale derivatives by the desired pixel stride and then // offset the starting values by the jitter fraction dP *= stride; dQ *= stride; dk *= stride; P0 += dP * jitter; Q0 += dQ * jitter; k0 += dk * jitter; // Slide P from P0 to P1, (now-homogeneous) Q from Q0 to Q1, k from k0 to k1 point3 Q = Q0; // Adjust end condition for iteration direction float end = P1.x * stepDir; float k = k0, stepCount = 0.0, prevZMaxEstimate = csOrig.z; float rayZMin = prevZMaxEstimate, rayZMax = prevZMaxEstimate; float sceneZMax = rayZMax + 100; for (point2 P = P0; ((P.x * stepDir) <= end) && (stepCount < maxSteps) && ((rayZMax < sceneZMax - zThickness) || (rayZMin > sceneZMax)) && (sceneZMax != 0); P += dP, Q.z += dQ.z, k += dk, ++stepCount) { rayZMin = prevZMaxEstimate; rayZMax = (dQ.z * 0.5 + Q.z) / (dk * 0.5 + k); prevZMaxEstimate = rayZMax; if (rayZMin > rayZMax) { float t = rayZMin; rayZMin = rayZMax; rayZMax = t; } hitPixel = permute ? 
P.yx : P; // You may need hitPixel.y = csZBufferSize.y - hitPixel.y; here if your vertical axis // is different than ours in screen space sceneZMax = texelFetch(csZBuffer, ivec2(hitPixel), 0).r; } // Advance Q based on the number of steps Q.xy += dQ.xy * stepCount; hitPoint = Q * (1.0 / k); return (rayZMax >= sceneZMax - zThickness) && (rayZMin < sceneZMax); } void main() { vec3 normal = texture(normalMap, texCoord).xyz * 2.0 - 1.0; vec3 viewPos = calcViewPosition(texCoord); // Reflection vector vec3 reflected = normalize(reflect(normalize(viewPos), normalize(normal))); vec2 hitPixel; vec3 hitPoint; bool tssr = traceScreenSpaceRay( viewPos, reflected, projection, depthMap, vec2(1366, 768), 0.0, // zThickness -1.0, // nearPlaneZ 1.0, // stride 0.0, // jitter 32, // maxSteps 32, // maxDistance hitPixel, hitPoint ); //fragColor = texture(colorMap, hitPixel); if (tssr) fragColor = mix(texture(colorMap, texCoord), texture(colorMap, hitPixel), texture(reflectionStrengthMap, texCoord).r); else fragColor = texture(colorMap, texCoord); } Screenshot: I create a projection matrix like this: glm::perspective(glm::radians(90.0f), (float) WIN_W / (float) WIN_H, 1.0f, 32.0f) This is what will happen if I display the image like this fragColor = texture(colorMap, hitPixel) colorMap: normalMap: depthMap: What am I doing wrong? Perhaps I misunderstand the value of csOrig, csDir and zThickness, so I would be glad if you could help me understand what these variables are.
  3. Hello! During the implementation of SSLR, I ran into a problem: only objects that are far from the reflecting surface are reflected. For example, as seen in the screenshot, this is a lamp and angel wings. I give the code and screenshots below. #version 330 core uniform sampler2D normalMap; // in view space uniform sampler2D depthMap; // in view space uniform sampler2D colorMap; uniform sampler2D reflectionStrengthMap; uniform mat4 projection; uniform mat4 inv_projection; in vec2 texCoord; layout (location = 0) out vec4 fragColor; vec3 calcViewPosition(in vec2 texCoord) { // Combine UV & depth into XY & Z (NDC) vec3 rawPosition = vec3(texCoord, texture(depthMap, texCoord).r); // Convert from (0, 1) range to (-1, 1) vec4 ScreenSpacePosition = vec4(rawPosition * 2 - 1, 1); // Undo Perspective transformation to bring into view space vec4 ViewPosition = inv_projection * ScreenSpacePosition; ViewPosition.y *= -1; // Perform perspective divide and return return ViewPosition.xyz / ViewPosition.w; } vec2 rayCast(vec3 dir, inout vec3 hitCoord, out float dDepth) { dir *= 0.25f; for (int i = 0; i < 20; i++) { hitCoord += dir; vec4 projectedCoord = projection * vec4(hitCoord, 1.0); projectedCoord.xy /= projectedCoord.w; projectedCoord.xy = projectedCoord.xy * 0.5 + 0.5; float depth = calcViewPosition(projectedCoord.xy).z; dDepth = hitCoord.z - depth; if(dDepth < 0.0) return projectedCoord.xy; } return vec2(-1.0); } void main() { vec3 normal = texture(normalMap, texCoord).xyz * 2.0 - 1.0; vec3 viewPos = calcViewPosition(texCoord); // Reflection vector vec3 reflected = normalize(reflect(normalize(viewPos), normalize(normal))); // Ray cast vec3 hitPos = viewPos; float dDepth; float minRayStep = 0.1f; vec2 coords = rayCast(reflected * minRayStep, hitPos, dDepth); if (coords != vec2(-1.0)) fragColor = mix(texture(colorMap, texCoord), texture(colorMap, coords), texture(reflectionStrengthMap, texCoord).r); else fragColor = texture(colorMap, texCoord); } Screenshot: colorMap: 
normalMap: depthMap: I would be grateful for any help.
  • Advertisement
×

Important Information

By using GameDev.net, you agree to our community Guidelines, Terms of Use, and Privacy Policy.

We are the game development community.

Whether you are an indie, hobbyist, AAA developer, or just trying to learn, GameDev.net is the place for you to learn, share, and connect with the games industry. Learn more About Us or sign up!

Sign me up!