• FEATURED
• FEATURED
• FEATURED
• FEATURED
• FEATURED

View more

View more

View more

### Image of the Day Submit

IOTD | Top Screenshots

### The latest, straight to your Inbox.

Subscribe to GameDev.net Direct to receive the latest updates and exclusive content.

# DX11 SSAO - Is this right? Again...

Old topic!

Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.

16 replies to this topic

### #1Migi0027 (肉コーダ)  Members

Posted 25 April 2013 - 04:57 PM

Hi guys,

this is kind of a repost and I am sorry for that, but this just isn't really working, or at least I don't think it is. Here are the results:

Without SSAO diffuse:

Normals:

Depth:

With SSAO:

Now some HLSL code:

// Input resources for the post-process pass: the scene colour buffer,
// the depth and normal G-buffer targets, a tiled random-vector texture
// for SSAO, and the bloom bright-pass result. One shared sampler.
Texture2D t_dffmap : register(t0);     // scene colour (diffuse) map
Texture2D t_depthmap : register(t1);   // depth buffer (sampled as .xyz by getPosition)
Texture2D t_normalmap : register(t2);  // normals encoded into [0,1]
Texture2D t_random : register(t3);     // random vectors, tiled across the screen for SSAO
Texture2D t_blmextract : register(t4); // bloom bright-pass extraction result
SamplerState ss;                       // shared sampler (default state)

// Per-frame effect toggles and parameters. Each flag is a float that the
// shader compares against 1 to enable the corresponding effect.
cbuffer PARAMSBUFFER : register(b0)
{
float time;                 // elapsed time, drives the pixel-distortion animation
float hblur;                // 1 = horizontal blur pass enabled
float bloomExtract;         // 1 = bright-pass extraction (early-outs the shader)
float bloom;                // 1 = bloom composite enabled
float pixelDisortion;       // 1 = UV distortion enabled (sic: "Disortion")
float pixelDisorterAmount;  // distortion strength
float ssao;                 // 1 = SSAO enabled
float space;                // presumably padding to a 16-byte multiple -- TODO confirm
};

// Bloom composite parameters (naming matches the classic XNA bloom
// sample -- TODO confirm the values were tuned the same way).
cbuffer BloomBuffer : register(b1)
{
float BloomThreshold;  // brightness cutoff used by the bright-pass
float BloomSaturation; // saturation applied to the bloom layer
float BaseSaturation;  // saturation applied to the base image
float BloomIntensity;  // bloom layer multiplier
float BaseIntensity;   // base image multiplier
};

// SSAO tuning parameters, consumed by doAmbientOcclusion.
cbuffer SSAOBuffer : register(b2)
{
float g_scale;     // scales the sample distance before attenuation
float g_bias;      // subtracted from the facing term to reduce self-occlusion
float g_intensity; // overall occlusion strength
};

// Vertex-to-pixel payload for the full-screen post-process pass.
// texCoord1..texCoord9 are the nine horizontal blur tap UVs; they are
// only written when hblur == 1 (see the vertex shader below).
struct VS_Output
{
float4 Pos : SV_POSITION;
float2 Tex : TEXCOORD0;       // base UV of the full-screen triangle
float2 texCoord1 : TEXCOORD1; // blur tap at -4 texels
float2 texCoord2 : TEXCOORD2; // blur tap at -3 texels
float2 texCoord3 : TEXCOORD3; // blur tap at -2 texels
float2 texCoord4 : TEXCOORD4; // blur tap at -1 texels
float2 texCoord5 : TEXCOORD5; // blur tap at  0 texels (center)
float2 texCoord6 : TEXCOORD6; // blur tap at +1 texels
float2 texCoord7 : TEXCOORD7; // blur tap at +2 texels
float2 texCoord8 : TEXCOORD8; // blur tap at +3 texels
float2 texCoord9 : TEXCOORD9; // blur tap at +4 texels
};

// NOTE(review): the function signature was lost in the forum formatting.
// From the bit tricks on 'id' below this is almost certainly the classic
// full-screen-triangle vertex shader, e.g.:
//   VS_Output VShader(uint id : SV_VertexID)
{
VS_Output Output;
// Full-screen-triangle trick: vertex ids 0,1,2 map to UVs (0,0),(2,0),(0,2),
// covering the whole screen with a single oversized triangle.
Output.Tex = float2((id << 1) & 2, id & 2);
// Map those UVs to clip space (flip Y so UV origin is top-left).
Output.Pos = float4(Output.Tex * float2(2,-2) + float2(-1,1), 0, 1);

if (hblur == 1)
{
// NOTE(review): texel size is hard-coded for an 800-pixel-wide backbuffer;
// should come from a constant buffer to support other resolutions.
float texelSize = 1.0f / 800;

// Create UV coordinates for the pixel and its four horizontal neighbors on either side.
Output.texCoord1 = Output.Tex + float2(texelSize * -4.0f, 0.0f);
Output.texCoord2 = Output.Tex + float2(texelSize * -3.0f, 0.0f);
Output.texCoord3 = Output.Tex + float2(texelSize * -2.0f, 0.0f);
Output.texCoord4 = Output.Tex + float2(texelSize * -1.0f, 0.0f);
Output.texCoord5 = Output.Tex + float2(texelSize *  0.0f, 0.0f);
Output.texCoord6 = Output.Tex + float2(texelSize *  1.0f, 0.0f);
Output.texCoord7 = Output.Tex + float2(texelSize *  2.0f, 0.0f);
Output.texCoord8 = Output.Tex + float2(texelSize *  3.0f, 0.0f);
Output.texCoord9 = Output.Tex + float2(texelSize *  4.0f, 0.0f);
}

return Output;
}

// Helper for modifying the saturation of a color.
// NOTE(review): the signature was lost in the forum formatting; based on
// the XNA bloom sample this shader follows, it should be:
//   float4 AdjustSaturation(float4 color, float saturation)
{
// The constants 0.3, 0.59, and 0.11 are chosen because the
// human eye is more sensitive to green light, and less to blue.
float grey = dot(color, float3(0.3, 0.59, 0.11));

// saturation = 0 -> greyscale, 1 -> unchanged, > 1 -> oversaturated.
return lerp(grey, color, saturation);
}

// Ambient Occlusion Stuff --------------------------------------------------

// Fetches the stored position/depth value for the given screen UV
// from the depth render target.
float3 getPosition(in float2 uv)
{
    float4 stored = t_depthmap.Sample(ss, uv);
    return stored.xyz;
}

// Decodes the normal stored at the given UV: expands the [0,1]-encoded
// value back to [-1,1] and renormalizes to undo interpolation shrinkage.
float3 getNormal(in float2 uv)
{
    float3 encoded = t_normalmap.Sample(ss, uv).xyz;
    float3 expanded = encoded * 2.0f - 1.0f;
    return normalize(expanded);
}

// Returns a per-pixel random 2D vector by tiling the 64x64 random texture
// across the screen, remapped from [0,1] to [-1,1] and normalized.
// NOTE(review): screen size (800x600) and noise texture size (64x64) are
// hard-coded -- pass them through a constant buffer for other resolutions.
float2 getRandom(in float2 uv)
{
return normalize(t_random.Sample(ss, float2(800, 600) * uv / float2(64, 64)).xy * 2.0f - 1.0f);
}

// Computes the occlusion contribution of one sample taken at an offset
// 'uv' from the pixel at 'tcoord', given the pixel's position 'p' and
// normal 'cnorm'. Matches the GameDev.net "Simple and Practical SSAO"
// formulation: facing term minus bias, attenuated by scaled distance.
float doAmbientOcclusion(in float2 tcoord, in float2 uv, in float3 p, in float3 cnorm)
{
    float3 toSample = getPosition(tcoord + uv) - p;
    float scaledDist = length(toSample) * g_scale;
    float3 dir = normalize(toSample);

    float facing = max(0.0, dot(cnorm, dir) - g_bias);
    float attenuation = 1.0 / (1.0 + scaledDist);
    return facing * attenuation * g_intensity;
}

// End

// NOTE(review): the function signature was lost in the forum formatting.
// This is the post-process pixel shader, presumably:
//   float4 PShader(VS_Output input) : SV_TARGET
{
// --- Bloom bright-pass: runs alone and returns early. -------------------
if (bloomExtract == 1)
{
// Look up the original image color.
float4 c = t_dffmap.Sample(ss, input.Tex);

// Adjust it to keep only values brighter than the specified threshold.
return saturate((c - BloomThreshold) / (1 - BloomThreshold));
}

// NOTE(review): starting at white means the blur branch below ADDS the
// weighted taps onto (1,1,1,1) instead of accumulating from black, which
// washes out the blurred result -- this likely should be 0 when the
// blur path runs. TODO confirm.
float4 color = float4(1.0f, 1.0f, 1.0f, 1.0f);

// --- Optional animated UV distortion ------------------------------------
if (pixelDisortion == 1)
{
// Distortion factor
float NoiseX = pixelDisorterAmount * (time/1000) * sin(input.Tex.x * input.Tex.y+time/1000);
NoiseX=fmod(NoiseX,8) * fmod(NoiseX,4);

// Use our distortion factor to compute how much it will affect each
// texture coordinate
float DistortX = fmod(NoiseX,5);
// NOTE(review): fmod(NoiseX,5+0.002) is fmod(NoiseX,5.002); probably
// intended fmod(NoiseX,5)+0.002 -- TODO confirm against the original effect.
float DistortY = fmod(NoiseX,5+0.002);

// Create our new texture coordinate based on our distortion factor
input.Tex = float2(DistortX,DistortY);
}

float4 dffMAP = t_dffmap.Sample(ss, input.Tex);

// --- 9-tap horizontal blur (taps prepared in the vertex shader) ---------
if (hblur == 1)
{
float weight0, weight1, weight2, weight3, weight4;
float normalization;

// Create the weights that each neighbor pixel will contribute to the blur.
weight0 = 1.0f;
weight1 = 0.9f;
weight2 = 0.55f;
weight3 = 0.18f;
weight4 = 0.1f;

// Create a normalized value to average the weights out a bit.
normalization = (weight0 + 2.0f * (weight1 + weight2 + weight3 + weight4));

// Normalize the weights.
weight0 = weight0 / normalization;
weight1 = weight1 / normalization;
weight2 = weight2 / normalization;
weight3 = weight3 / normalization;
weight4 = weight4 / normalization;

// Add the nine horizontal pixels to the color by the specific weight of each.
color += t_dffmap.Sample(ss, input.texCoord1) * weight4;
color += t_dffmap.Sample(ss, input.texCoord2) * weight3;
color += t_dffmap.Sample(ss, input.texCoord3) * weight2;
color += t_dffmap.Sample(ss, input.texCoord4) * weight1;
color += t_dffmap.Sample(ss, input.texCoord5) * weight0;
color += t_dffmap.Sample(ss, input.texCoord6) * weight1;
color += t_dffmap.Sample(ss, input.texCoord7) * weight2;
color += t_dffmap.Sample(ss, input.texCoord8) * weight3;
color += t_dffmap.Sample(ss, input.texCoord9) * weight4;
}
else
color *= dffMAP;

// --- SSAO ---------------------------------------------------------------
if (ssao == 1)
{
// Apply SSAO

// Four axis-aligned sampling directions; the reference article reflects
// these by the per-pixel random vector to decorrelate samples.
const float2 vec[4] = {float2(1,0),float2(-1,0),
float2(0,1),float2(0,-1)};

float3 p = getPosition(input.Tex);
float3 n = getNormal(input.Tex);
float2 rand = getRandom(input.Tex);

float ao = 0.0f;

//**SSAO Calculation**//
int iterations = 1;
for (int j = 0; j < iterations; ++j)
{
// NOTE(review): 'coord1' is never declared in this excerpt. In the
// reference article it is:
//   float2 coord1 = reflect(vec[j], rand) * sampleRadius;
// That line appears to have been lost from the post -- restore it,
// otherwise this does not compile.
float2 coord2 = float2(coord1.x*0.707 - coord1.y*0.707,
coord1.x*0.707 + coord1.y*0.707);

ao += doAmbientOcclusion(input.Tex,coord1*0.25, p, n);
ao += doAmbientOcclusion(input.Tex,coord2*0.5, p, n);
ao += doAmbientOcclusion(input.Tex,coord1*0.75, p, n);
ao += doAmbientOcclusion(input.Tex,coord2, p, n);
}
ao/=(float)iterations*4.0;
// NOTE(review): 'ao' accumulates OCCLUSION (0 = fully open, larger =
// more occluded), so multiplying by it blacks out unoccluded pixels;
// the reference article darkens with (1 - ao) instead. TODO confirm.
color.rgb *= ao;
}

// --- Bloom composite -----------------------------------------------------
if(bloom == 1)
{
// Look up the bloom and original base image colors.
float4 cbloom = t_blmextract.Sample(ss, input.Tex);
float4 base = color;

// Adjust color saturation and intensity.
cbloom = AdjustSaturation(cbloom, BloomSaturation) * BloomIntensity;
base = AdjustSaturation(base, BaseSaturation) * BaseIntensity;

// Darken down the base image in areas where there is a lot of bloom,
// to prevent things looking excessively burned-out.
base *= (1 - saturate(cbloom));

// Combine the two images.
color = base + cbloom;
}

return color;
}


The variables for the SSAO is the following:

SSAOParameters.g_scale = 1;
SSAOParameters.g_intensity = 1;
SSAOParameters.g_bias = 0.001f;

Now what on earth, if anything, am I doing wrong?

Thank You

Edited by Migi0027, 26 April 2013 - 12:16 AM.

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #2Jason Z  Members

Posted 25 April 2013 - 07:33 PM

Do you have an image with SSAO enabled?  The only thing that looks odd is the fact that your shadow isn't filled at all, but that isn't really relevant to SSAO at all.

Jason Zink :: DirectX MVP

Direct3D 11 engine on CodePlex: Hieroglyph 3

Games: Lunar Rift

### #3Migi0027 (肉コーダ)  Members

Posted 26 April 2013 - 12:17 AM

Sorry, some kind of error occurred so the rest of the post wasn't shown. Please reread the post, or at least the bottom.

Sorry

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #4Migi0027 (肉コーダ)  Members

Posted 26 April 2013 - 04:35 AM

Do you have an image with SSAO enabled?  The only thing that looks odd is the fact that your shadow isn't filled at all, but that isn't really relevant to SSAO at all.

Ohh and the box, it's because receive shadows (variable) has been disabled for that box, I was just unlucky.

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #5Jason Z  Members

Posted 26 April 2013 - 04:36 AM

The basics of SSAO appear to be generally working, as the samples tend to be darker near a corner.  However, you seem to have some sort of a depth bias in your calculations, because further into the scene things are getting darker.  If you look at the floor plane then you can see the difference between the foreground and the background, even though they have the exact same occluders in their local areas.

When you select the samples to take, are they being done in screen space or are they reprojected into world space?

Jason Zink :: DirectX MVP

Direct3D 11 engine on CodePlex: Hieroglyph 3

Games: Lunar Rift

### #6Migi0027 (肉コーダ)  Members

Posted 26 April 2013 - 04:44 AM

If this is any help, I'm following this: http://www.gamedev.net/page/resources/_/technical/graphics-programming-and-theory/a-simple-and-practical-approach-to-ssao-r2753

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #7Migi0027 (肉コーダ)  Members

Posted 26 April 2013 - 07:11 AM

The darkness in the scene with SSAO, can it be because of my depth buffer?

I'm just trying to find a possible solution for this problem.

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #8Migi0027 (肉コーダ)  Members

Posted 26 April 2013 - 08:51 AM

Ohh, this might be useful:

The shader for normal and depth rendering:

// Per-object constants for the depth/normal G-buffer pass.
cbuffer ConstantObjectBuffer : register (b0)
{
matrix worldMatrix;
matrix viewMatrix;
matrix projectionMatrix;

float state;     // render-mode selector: 2 or 5 = depth, 6 = normals (see pixel shader)
float _instance; // 1 = apply the per-instance position offset in VShader
float _alphamap; // alpha-map toggle -- not used in this excerpt, TODO confirm
};

// Vertex-to-pixel payload for the depth/normal pass.
struct VOut
{
float4 position : SV_POSITION;
float4 depthPosition : TEXTURE0; // copy of the clip-space position, used for z/w depth
float4 normal : NORMAL;          // interpolated normal (space set by VShader)
float2 texcoord : TEXCOORD;
float Depth : DEPTH;             // raw clip-space z (before the perspective divide)
};

Texture2D t_alphamap; // alpha-map texture (not sampled in this excerpt)
SamplerState ss;      // shared sampler (default state)

// Vertex shader for the depth/normal G-buffer pass.
// Applies an optional per-instance translation, transforms the vertex
// through world/view/projection, and forwards the data needed to write
// depth (z/w) and normals in the pixel shader.
VOut VShader(float4 position : POSITION, float4 normal : NORMAL, float2 texcoord : TEXCOORD, float3 instancePosition : INSTANCEPOS)
{
    VOut output;

    // Instanced geometry: offset the model-space position by the
    // per-instance translation before the world transform.
    if (_instance == 1)
    {
        position.x += instancePosition.x;
        position.y += instancePosition.y;
        position.z += instancePosition.z;
    }

    // Force w = 1 so the translation parts of the matrices apply.
    position.w = 1.0f;
    output.texcoord = texcoord;

    // Calculate the position of the vertex against the world, view, and projection matrices.
    output.position = mul(position, worldMatrix);
    output.position = mul(output.position, viewMatrix);
    output.position = mul(output.position, projectionMatrix);

    // FIX: rotate the normal into world space with the upper-left 3x3 of the
    // world matrix (same row-vector mul convention as the position transform
    // above) instead of passing the raw object-space normal through. The
    // original wrote the untransformed normal into the G-buffer, which is
    // wrong as soon as an object is rotated.
    // NOTE(review): assumes worldMatrix has no non-uniform scale; otherwise
    // the inverse-transpose is required -- TODO confirm.
    output.normal = float4(normalize(mul(normal.xyz, (float3x3)worldMatrix)), 0.0f);

    // Store the clip-space position separately for per-pixel depth (z/w).
    output.depthPosition = output.position;
    output.Depth = output.position.z;

    return output;
}

// NOTE(review): the signature was lost in the forum formatting; this is the
// pixel shader of the depth/normal pass, presumably:
//   float4 PShader(VOut input) : SV_TARGET
{
float4 color = float4(1,1,1,1);

if (state == 5 || state == 2) // DEPTH
{
// Perspective (non-linear) depth: z/w after projection.
// NOTE(review): this hyperbolic depth is what the thread identifies as
// the cause of the distance-dependent darkening in the SSAO pass --
// a linear (view-space) depth or reconstructed position would behave
// better there.
float depthValue = input.depthPosition.z / input.depthPosition.w;

color = float4(depthValue, depthValue, depthValue, 1);
}
else if (state == 6) // NORMALS
{
// Encode the normalized normal from [-1,1] into [0,1] for storage.
// NOTE(review): despite the variable name, confirm the normal was
// actually transformed into a consistent (world/view) space in VShader
// before this encode -- the SSAO pass depends on it.
float3 viewSpaceNormalizedNormals = 0.5 * normalize (input.normal) + 0.5;
color = float4(viewSpaceNormalizedNormals, 1);
}

return color;
}


Don't worry about the ifs, what's really important is if I'm mapping the depth and normal buffers correctly?

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #9Jason Z  Members

Posted 26 April 2013 - 03:56 PM

It looks like you are using perspective depth, which would probably explain the increasing darkness as you move into the scene.  You are using screen space offsets from the pixel being processed, which aren't linear with z, so your pixels that are far away are skewed toward being occluded.

What you should be doing is to find the linear space position of the current pixel, then add a linear space vector offset to it, then reproject that point back to projection space and sample the point.  That would (most likely) make the darkening go away.

Jason Zink :: DirectX MVP

Direct3D 11 engine on CodePlex: Hieroglyph 3

Games: Lunar Rift

### #10Migi0027 (肉コーダ)  Members

Posted 26 April 2013 - 06:28 PM

Sorry, but how would I achieve that Jason Z?

Some of these topics are still quite new for me as this is self taught, so I'm still trying to understand every single bit.

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #11Jason Z  Members

Posted 27 April 2013 - 05:22 AM

Are you familiar with the various spaces in the rendering pipeline?  For example, your transform typically goes from object space to model space to view space, and they are all simple transformations that just change the orientation and location of the origin of their previous spaces.

The projection matrix is different though, since it warps the geometry of the scene so that a frustum shaped chunk of the scene fits into a cube.  This non linear behavior is what I suspect is your issue.

So the steps in the process that you need to implement in order to find out if this is the case are all in your shader:

1. For the pixel currently being calculated, find its view space position.  You will need to instrument your shader for this - either passing the view space position in your attributes, or passing an inverse projection matrix in your constant buffers.
2. When you do the offsets in your depth samples, these are now being applied to that view space position.  They will also be in your regular units as well (i.e. meters or whatever unit you use), so it is more logical to think about how large the radius is.
3. However, to look up where that 3D view space offset location is in your depth buffer, you need to re-project the point and find its location in the depth buffer.  This can either use the projection matrix, or you can just do the simple math version on the xy coordinates (since those are what is needed to find the depth buffer location).

Have you tried to implement any of these steps yet?  If so, which ones are you getting hung up on?

Jason Zink :: DirectX MVP

Direct3D 11 engine on CodePlex: Hieroglyph 3

Games: Lunar Rift

### #12Migi0027 (肉コーダ)  Members

Posted 28 April 2013 - 04:10 AM

So I pass the inverse projection matrix to the post process shader (with ssao)?

And then I'm stuck in the 3rd step. So I'm supposed to somehow edit this:

const float2 vec[4] = {float2(1,0),float2(-1,0),
float2(0,1),float2(0,-1)};

float3 p = getPosition(input.Tex);
float3 n = getNormal(input.Tex);
float2 rand = getRandom(input.Tex);

float ao = 0.0f;

//**SSAO Calculation**//
int iterations = 1;
for (int j = 0; j < iterations; ++j)
{
float2 coord2 = float2(coord1.x*0.707 - coord1.y*0.707,
coord1.x*0.707 + coord1.y*0.707);

ao += doAmbientOcclusion(input.Tex,coord1*0.25, p, n);
ao += doAmbientOcclusion(input.Tex,coord2*0.5, p, n);
ao += doAmbientOcclusion(input.Tex,coord1*0.75, p, n);
ao += doAmbientOcclusion(input.Tex,coord2, p, n);
}
ao/=(float)iterations*4.0;
color.rgb *= ao;


But exactly how?

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #13Jason Z  Members

Posted 28 April 2013 - 06:15 AM

I'm sorry, but I am not going to write the shader for you.  Do you have specific questions about how it works?

Jason Zink :: DirectX MVP

Direct3D 11 engine on CodePlex: Hieroglyph 3

Games: Lunar Rift

### #14Migi0027 (肉コーダ)  Members

Posted 28 April 2013 - 06:32 AM

It's more that I don't understand exactly what I'm supposed to do...

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #15Jason Z  Members

Posted 28 April 2013 - 12:21 PM

Do you have specific questions about how it works?

That's why I'm asking you if you have any specific questions about how it works!  That is also why I listed the process in steps, so that you can direct questions about a particular portion of the process.  You need to think about each step, and ask us a question about it - there are many people here willing to help, but I doubt anyone is going to just write the shader for you and say here is your solution.

If you have absolutely no idea what those process steps mean, then ask a question about them, don't ask for a code example showing it.

Jason Zink :: DirectX MVP

Direct3D 11 engine on CodePlex: Hieroglyph 3

Games: Lunar Rift

### #16Migi0027 (肉コーダ)  Members

Posted 28 April 2013 - 03:39 PM

Sorry for the trouble!

It's in the step 3:

However, to look up where that 3D view space offset location is in your depth buffer, you need to re-project the point and find its location in the depth buffer.  This can either use the projection matrix, or you can just do the simple math version on the xy coordinates (since those are what is needed to find the depth buffer location).

So how can I re-project a certain point and then find its position in my depth buffer?

FastCall22: "I want to make the distinction that my laptop is a whore-box that connects to different network"

Blog about... stuff (GDNet, WordPress): www.gamedev.net/blog/1882-the-cuboid-zone/cuboidzone.wordpress.com/

### #17Jason Z  Members

Posted 29 April 2013 - 05:15 AM

In this case, you can either directly use a projection matrix (which must be by itself with no view matrix multiplied) that is supplied through a constant buffer, or you can do some of the math that the projection matrix normally does in your own code.  The latter is more efficient, since you are only worried about the xy coordinates so you know where to sample the buffer.

So to do the math on only the xy coordinates, try taking a look at the formula for the projection matrix that you are using, and write out the equation for only the x and y.  This will guide you on what math is required to get back to clip space coordinates.  Once you have these clip space coordinates, you just need to remap them to texture coordinates and sample the texture.

If you aren't too familiar with matrix math, then check out the Wikipedia page for how a vector is multiplied by a matrix, and give it a shot.  You can always post questions here if something isn't clear to you.

Jason Zink :: DirectX MVP

Direct3D 11 engine on CodePlex: Hieroglyph 3

Games: Lunar Rift

Old topic!

Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.