SSAO problems! Please help!

I've been tearing my hair out for days trying to get my DX9 SSAO code working. I know there have been a million posts on this, but despite reading them 1000 times I'm still having trouble!

I'm using the algorithm described in "A Simple and Practical Approach to SSAO".

I think the issue stems from my calculation of view-space normals and positions. Is the view-space position supposed to be normalized?

Here is the code for storing view space normals and positions to two D3DFMT_A32B32G32R32F render targets:

Position and Normal Map Generation:

struct PosNormMap_PSIn
{
    float4 PosWVP : POSITION0; // Homogeneous clip-space position
    float4 PosWV  : TEXCOORD0; // View-space position
    float4 NormWV : TEXCOORD1; // View-space normal
};

struct PosNormMap_PSOut // 128-bit buffers
{
    float4 Pos  : COLOR0; // View-space position
    float4 Norm : COLOR1; // View-space normal
};


// Vertex Shader
PosNormMap_PSIn BuildPosNormMapVS(float3 Position : POSITION,
                                  float3 Normal   : NORMAL0)
{
    PosNormMap_PSIn Output;

    Output.PosWVP = mul(float4(Position, 1.0f), gWVP);
    Output.PosWV  = mul(float4(Position, 1.0f), gWV);
    Output.NormWV = mul(float4(Normal, 0.0f), gWV);

    return Output;
}


// Map from [-1,1] into [0,1]... not strictly necessary for floating-point textures, but required for integer textures
float4 NormalToTexture(float4 norm)
{
    return norm * 0.5f + 0.5f;
}

// Pixel Shader
PosNormMap_PSOut BuildPosNormMapPS(PosNormMap_PSIn Input)
{
    PosNormMap_PSOut Output;

    Output.Pos = Input.PosWV;
    //Output.Pos.xy = Output.Pos.xy / Output.Pos.w;
    //Output.Pos.z = linstep(gCameraNearFar.x, gCameraNearFar.y, Output.Pos.z); // Rescale from 0 to 1
    Output.Norm = NormalToTexture(normalize(Input.NormWV)); // Interpolated normals may be denormalized, so renormalize, then map into [0,1]

    return Output;
}

technique BuildPosNormMap_Tech
{
    pass P0
    {
        vertexShader = compile vs_3_0 BuildPosNormMapVS();
        pixelShader  = compile ps_3_0 BuildPosNormMapPS();
    }
}



Here is the code for populating the occlusion buffer (D3DFMT_R32F):


uniform extern float2 gVectorNoiseSize; // default: 64x64
uniform extern float2 gScreenSize;
uniform extern float gSSAOSampleRadius; // default: 0.5-->2.0
uniform extern float gSSAOBias; // default: 0.05
uniform extern float gSSAOIntensity; // default: 3.0
uniform extern float gSSAOScale; // default: 1.0-->2.0


sampler PPSampPosition = sampler_state
{
    Texture = <gPPPosition>;
    MipFilter = POINT;
    MinFilter = POINT;
    MagFilter = POINT;
    MaxAnisotropy = 1;
    AddressU = CLAMP;
    AddressV = CLAMP;
};

sampler PPSampNormal = sampler_state
{
    Texture = <gPPNormal>;
    MipFilter = POINT;
    MinFilter = POINT;
    MagFilter = POINT;
    MaxAnisotropy = 1;
    AddressU = CLAMP;
    AddressV = CLAMP;
};

sampler PPSampVectorNoise = sampler_state
{
    Texture = <gPPVectorNoise>;
    MipFilter = LINEAR;
    MinFilter = LINEAR;
    MagFilter = LINEAR;
    MaxAnisotropy = 1;
    AddressU = WRAP;
    AddressV = WRAP;
};


// Vertex Shader
void OcclusionMap_VS(float3 pos0 : POSITION,
                     float2 tex0 : TEXCOORD0,
                     out float4 oPos0 : POSITION0,
                     out float2 oTex0 : TEXCOORD1)
{
    // Pass texture and position coords on to the PS
    oTex0 = tex0;
    oPos0 = float4(pos0, 1.0f);
}

float3 getPosition(in float2 tex0)
{
    return tex2D(PPSampPosition, tex0).xyz;
}

float3 getNormal(in float2 tex0)
{
    return normalize(tex2D(PPSampNormal, tex0).xyz * 2.0f - 1.0f);
    //return tex2D(PPSampNormal, tex0).xyz * 2.0f - 1.0f;
}

float2 getRandom(in float2 tex0)
{
    return normalize(tex2D(PPSampVectorNoise, gScreenSize * tex0 / gVectorNoiseSize).xy * 2.0f - 1.0f);
}

float doAmbientOcclusion(in float2 tcoord, in float2 tex0, in float3 p, in float3 cnorm)
{
    float3 diff = getPosition(tcoord + tex0) - p;
    const float3 v = normalize(diff);
    const float d = length(diff) * gSSAOScale;
    return max(0.0, dot(cnorm, v) - gSSAOBias) * (1.0 / (1.0 + d)) * gSSAOIntensity;
}


// Pixel Shader
float4 OcclusionMap_PS(float2 tex0 : TEXCOORD1) : COLOR
{
    const float2 vec[4] = { float2(1,0), float2(-1,0), float2(0,1), float2(0,-1) };

    float3 p = getPosition(tex0);
    float3 n = getNormal(tex0);
    float2 rand = getRandom(tex0);
    float ao = 0.0f;
    float rad = gSSAOSampleRadius / p.z;

    // SSAO calculation
    const int iterations = 4;
    for (int j = 0; j < iterations; ++j)
    {
        float2 coord1 = reflect(vec[j], rand) * rad;
        //float2 coord2 = float2(coord1.x*0.707 - coord1.y*0.707, coord1.x*0.707 + coord1.y*0.707);
        float2 coord2 = float2(coord1.x - coord1.y, coord1.x + coord1.y) * 0.707f;

        ao += doAmbientOcclusion(tex0, coord1 * 0.25, p, n);
        ao += doAmbientOcclusion(tex0, coord2 * 0.5,  p, n);
        ao += doAmbientOcclusion(tex0, coord1 * 0.75, p, n);
        ao += doAmbientOcclusion(tex0, coord2,        p, n);
    }

    ao /= (float)iterations * 4.0;
    //END

    return float4(ao, 1.0f, 1.0f, 1.0f);
}


// Technique
technique OcclusionMap_Tech
{
    pass P0
    {
        // Specify the vertex and pixel shader associated with this pass.
        vertexShader = compile vs_3_0 OcclusionMap_VS();
        pixelShader  = compile ps_3_0 OcclusionMap_PS();
    }
}



The normal buffer output looks OK, but the view buffer output looks off. I think each object is close to the camera near plane (so Z << 1), and that's why everything looks dark.

Also, what filters should be used for the position and normal buffers? When calculating the occlusion buffer I'm rendering a full-screen quad the same size as the position and normal buffers, so there's 1:1 texel mapping. I'm guessing POINT filtering with CLAMP addressing is correct?
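One related thing I've read about but haven't verified for my setup: the D3D9 half-texel offset between pixel centres and texel centres. If that applies here, I'd guess the quad VS needs something like this sketch (reusing the gScreenSize uniform from above):

// Variant of OcclusionMap_VS with a half-texel offset (a guess, not
// verified against my setup): in D3D9, pixel centres sit half a texel
// away from the UVs of a [-1,1] quad, so shift the UVs to get true
// 1:1 texel mapping.
void OcclusionMap_VS(float3 pos0 : POSITION,
                     float2 tex0 : TEXCOORD0,
                     out float4 oPos0 : POSITION0,
                     out float2 oTex0 : TEXCOORD1)
{
    oTex0 = tex0 + 0.5f / gScreenSize; // gScreenSize is in pixels
    oPos0 = float4(pos0, 1.0f);
}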

Viewspace Position Buffer (62.5% of original size): [image]

Viewspace Normal Buffer (62.5% of original size): [image]

Occlusion Buffer (62.5% of original size): [image]


Thanks in advance for the help!
Your view-space position buffer looks wrong. You should see 4 "squares" there, not just a blue gradient.

Are you sure the gWV matrix is right? It should contain WorldMatrix * ViewMatrix.
I'm pretty sure it's correct. Stepping through the code, the matrices look OK. I even calculated the transforms in MATLAB and compared them against the pixel shader results in PIX.

This is the code that sets the view and projection matrices per frame:


void camera::Update(void)
{
    m_vUp = g_UI->GetSetting<D3DXVECTOR3>(&var_startingUp);
    util::GetCorrectUp(&m_vLookAtPt, &m_vEyePt, &m_vUp); // Correct for arbitrary eye direction

    D3DXMatrixLookAtLH(&m_matView, &m_vEyePt, &m_vLookAtPt, &m_vUp);

    // Fit near and far planes to the scene objects' world bounds.
    // Important for cascaded shadow maps to reduce aliasing.
    g_objectManager->FitViewMatrixNearFarToRBObjects(&m_matView, &zNearFar.x, &zNearFar.y, zNearMin, zFarMax);

    zNearToFar = zNearFar.y - zNearFar.x;

    g_UI->GetWidthHeight(&width, &height);
    D3DXMatrixPerspectiveFovLH(&m_matProjection,
                               g_UI->GetSetting<float>(&var_fieldOfView),
                               (float)width / (float)height,
                               zNearFar.x, zNearFar.y);

    m_ViewProj = m_matView * m_matProjection;
} //Update


This is the code that sets the matrices and draws a meshed object:


void renderer::DrawTexturedMeshPosNorm(rbobjectMeshData * meshData, D3DXMATRIXA16 * matWorld)
{
    UINT numPasses = 0;
    HR(m_FX->Begin(&numPasses, 0), L"Render::DrawTexturedMeshPosNorm() - m_FX->Begin Failed: ");
    HR(m_FX->BeginPass(0), L"Render::DrawTexturedMeshPosNorm() - m_FX->BeginPass Failed: ");

    D3DXMATRIX WV;
    D3DXMatrixMultiply(&WV, matWorld, &g_objectManager->GetCamera()->m_matView); // matWorld is the current RBO model->world transform
    HR(m_FX->SetMatrix(m_FXHandles.m_hWV, &WV), L"Render::SetPosNormMatricies() - Failed to set m_hWV matrix: ");

    D3DXMATRIX WVP;
    D3DXMatrixMultiply(&WVP, &WV, &g_objectManager->GetCamera()->m_matProjection);
    HR(m_FX->SetMatrix(m_FXHandles.m_hWVP, &WVP), L"Render::SetPosNormMatricies() - Failed to set m_hWVP matrix: ");

    HR(m_FX->CommitChanges(), L"Render::DrawTexturedMeshPosNorm() - CommitChanges failed: ");

    for (UINT j = 0; j < meshData->materials->Size(); ++j)
    {
        HR(meshData->pMesh->DrawSubset(j), L"Render::DrawTexturedMeshPosNorm() - DrawSubset failed: ");
    }

    HR(m_FX->EndPass(), L"Render::DrawTexturedMeshPosNorm() - m_FX->EndPass Failed: ");
    HR(m_FX->End(), L"Render::DrawTexturedMeshPosNorm() - m_FX->End Failed: ");
}
Thanks again for your help.

Does the view-space position need to be normalized from 0 to 1? That's the only way I can imagine view-space positions would give 4 colored squares; otherwise some view values will be negative (DirectX view-space x and y run from -1 to 1, I think), which will show up dark on screen.

Does depth need to be normalized as well?

Sorry for bombarding you with questions... and I appreciate the help.
No, you don't have to normalize it. I tried it in my code and it looks different.

Maybe you're getting those problems because you mix D3DXMATRIX and D3DXMATRIXA16? Another thing you could try is to transform the position by each matrix separately.
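As a rough sketch of what I mean (you'd need to add hypothetical gWorld/gView uniforms and set them from the application):

// Debug variant of the position output: transform by World and View
// separately instead of by the pre-multiplied gWV.
uniform extern float4x4 gWorld; // hypothetical; set per object from the app
uniform extern float4x4 gView;  // hypothetical; set per frame from the app

float4 ViewSpacePosDebug(float3 Position)
{
    float4 posW = mul(float4(Position, 1.0f), gWorld); // object -> world
    return mul(posW, gView);                           // world -> view
}

If that gives a different result from mul(..., gWV), the combined matrix is being built or uploaded incorrectly.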

Here is an image of how it should look: [image]
Hmm... I guess I'm having trouble working out why it should look like that.

First, if (viewpos.x, viewpos.y, viewpos.z) are stored as (R, G, B), then shouldn't objects farther from the camera have a stronger blue component?

In your view-space rendering it looks like blue is constant across the lower portion of the screen. When you're rendering the position data, are you rendering like this?


// Sample the texture
float3 xyz = tex2D(PPSampSource, tex0).xyz;
return float4(xyz, 1.0);



Or are you rendering some other representation of the data? As a side note, what format are you using to store your view-space position buffer? I'm guessing floating point...

I'm really stuck on this. I feel like I must be doing something very stupid.
OK, now I'm getting somewhere...

My camera far plane was 2.5 and my near plane was 0.01. All my objects were very close to the camera and all very small (< 0.01 in width!). The view-space coordinates were correct; it was just a matter of scale. The R, G, and B values were all < 1, which is why everything looked black.

I was right, I was doing something stupid.
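In hindsight, a quick debug shader that rescales the buffer into a visible range would have caught this immediately. Something like this sketch (assuming a gCameraNearFar = (near, far) uniform like the one in my commented-out linstep line above):

// Debug view of the position buffer: remap view-space depth into [0,1]
// so very small scenes aren't just black on screen.
float4 ShowViewPosPS(float2 tex0 : TEXCOORD1) : COLOR
{
    float3 p = tex2D(PPSampPosition, tex0).xyz;
    float zNorm = (p.z - gCameraNearFar.x) / (gCameraNearFar.y - gCameraNearFar.x);
    return float4(abs(p.xy), zNorm, 1.0f);
}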

So I've made all my world objects 100x larger and changed the camera near and far planes to 1.0 and 250.0. Now the view-space coordinates look like this:

View Space Buffer: [image]

Occlusion Buffer: [image]


So there's something wrong with the occlusion buffer, but at least I know the inputs are now OK. I'll continue to debug and get back to you guys.
Actually, your occlusion buffer is correct... though to see the value that you'll be multiplying by, you need to store not "result" but "1.0 - result". Also note: make sure you clamp your result between 0.0 and 1.0.

Decreasing the radius a bit and adding more samples helps (jittering helps a lot), and with a bilateral blur pass after this you will get good SSAO.
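Roughly, the last line of your OcclusionMap_PS would become something like this (a sketch of the idea, not tested against your code):

// Store a clamped occlusion factor (1 = unoccluded) instead of the raw
// accumulated term, so it can be multiplied straight into the lighting.
return float4(saturate(1.0f - ao), 1.0f, 1.0f, 1.0f);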

My current blog on programming, linux and stuff - http://gameprogrammerdiary.blogspot.com
