
DX11 - SSAO


I'm so terribly sorry to re-re-re-open this one, but I never fully succeeded at it: SSAO.

 

SSAO Shader:

Texture2D t_depthmap : register(t0);  // sampled by getPosition() below
Texture2D t_normalmap : register(t1); // view-space normals
Texture2D t_random : register(t2);    // small random-vector texture
SamplerState ss;

cbuffer SSAOBuffer : register(b0) // constant buffers bind to b registers, not c
{
	float g_scale;
	float g_bias;
	float g_sample_rad;
	float g_intensity;
	float ssaoIterations; // not used; the iteration count is hard-coded below
	float3 pppspace;      // padding

	matrix view;
};

struct VS_Output
{  
	float4 Pos : SV_POSITION;              
	float2 Tex : TEXCOORD0;
};
 
// Full-screen triangle generated from the vertex id; no vertex buffer needed.
VS_Output VShader(uint id : SV_VertexID)
{
	VS_Output Output;
	Output.Tex = float2((id << 1) & 2, id & 2);
	Output.Pos = float4(Output.Tex * float2(2,-2) + float2(-1,1), 0, 1);
	return Output;
}

// Helper for modifying the saturation of a color (not used by PShader below).
float4 AdjustSaturation(float4 color, float saturation)
{
	// The constants 0.3, 0.59, and 0.11 are chosen because the
	// human eye is more sensitive to green light, and less to blue.
	float grey = dot(color.rgb, float3(0.3, 0.59, 0.11));

	return lerp(grey, color, saturation);
}

// Ambient Occlusion Stuff --------------------------------------------------

float3 getPosition(in float2 uv)
{
	// The algorithm expects a view-space position here, not a raw depth value.
	return t_depthmap.Sample(ss, uv).xyz;
}

float3 getNormal(in float2 uv)
{
	return normalize(t_normalmap.Sample(ss, uv).xyz * 2.0f - 1.0f);
}

float2 getRandom(in float2 uv)
{
	//return normalize(t_random.Sample(ss, uv ).xy * 2.0f - 1.0f); // ~100FPS
	// Tiles the random texture over the screen; the screen and random-texture
	// dimensions are hard-coded here.
	return normalize(t_random.Sample(ss, float2(600, 800) * uv / float2(60, 60)).xy * 2.0f - 1.0f);
}

float doAmbientOcclusion(in float2 tcoord, in float2 uv, in float3 p, in float3 cnorm)
{
	float3 diff = getPosition(tcoord + uv) - p;
	const float3 v = normalize(diff);
	const float d = length(diff)*g_scale;
	return max(0.0, dot(cnorm,v) - g_bias) * (1.0/(1.0+d)) * g_intensity;
}

// End

float4 PShader(VS_Output input) : SV_TARGET
{
	// ADD SSAO ---------------------------------------------------------------
	const float2 vec[4] = {float2(1,0), float2(-1,0),
				float2(0,1), float2(0,-1)};

	float3 p = getPosition(input.Tex);
	float3 n = getNormal(input.Tex);
	float2 rand = getRandom(input.Tex);

	float ao = 0.0f;
	float rad = g_sample_rad / p.z; // screen-space sampling radius shrinks with distance

	//**SSAO Calculation**//
	int iterations = 4;
	for (int j = 0; j < iterations; ++j)
	{
	  // Reflect the base offset by the random vector, plus a 45-degree rotated copy.
	  float2 coord1 = reflect(vec[j], rand) * rad;
	  float2 coord2 = float2(coord1.x*0.707 - coord1.y*0.707,
				  coord1.x*0.707 + coord1.y*0.707);
	  
	  ao += doAmbientOcclusion(input.Tex, coord1*0.25, p, n);
	  ao += doAmbientOcclusion(input.Tex, coord2*0.5,  p, n);
	  ao += doAmbientOcclusion(input.Tex, coord1*0.75, p, n);
	  ao += doAmbientOcclusion(input.Tex, coord2,      p, n);
	}
	ao /= (float)iterations * 4.0; // 4 samples per iteration

	ao = saturate(ao);

	// Visualise the accumulated occlusion term directly.
	return float4(ao, ao, ao, 1.0f);
}

How I write my depth map for the SSAO:

VS
output.depth = output.position.z / output.position.w;
PS
float depth = input.depth;
output.Depth = float4(depth, depth, depth, 1);

How I write my normal map for the SSAO:

VS -- I think this is wrong
output.NormalW = mul(mul(worldMatrix, viewMatrix), float4(normal.xyz,0) );
PS
output.Normals = float4(input.NormalW, 1);

Render Result:

 

[Screenshot: 2ztay6g.png]

 

Ehh.

 

Again, I'm so sorry for re-posting this; it's just that I really want this feature completed (well, working, at least).

 

Thank you, as always...


So how should the depth be written out correctly for the SSAO shader?

Thank you for your interest, by the way; I appreciate it!


I think that SSAO shader needs the view-space position (all three components: x, y and z), since as far as I can see you store your normals that way. Although:

output.NormalW = mul(mul(worldMatrix, viewMatrix), float4(normal.xyz,0) );

this mul order is only right if the shader packs matrices as column-major. I don't know the DX11 shader system, so I'm not sure, but in DX9 I can force row-major with this statement:

#pragma pack_matrix(row_major)

because it matches how I lay out matrices in my C++ code, and I always multiply vectors with matrices this way:

result = mul( vec, matrix );
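
In D3D11 HLSL the equivalent should be either that same pragma or the row_major modifier on the matrix itself; a minimal sketch (the cbuffer, register and function names here are just for illustration, not taken from the thread):

// Option 1: force row-major packing for every matrix in this file.
#pragma pack_matrix(row_major)

// Option 2: mark individual matrices (hypothetical cbuffer, for illustration only).
cbuffer PerObject : register(b1)
{
	row_major float4x4 worldView; // matches a row-major matrix uploaded as-is from C++
};

float4 TransformToView(float4 positionLocal)
{
	// With row-major packing, vector-on-the-left is the matching multiplication order.
	return mul(positionLocal, worldView);
}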
Edited by belfegor


OK, it is a bit better now, in the sense that it doesn't just go randomly black. (SSAO map)

 

[Screenshot: 2vjsx8h.png]

 

 

But this isn't right yet, or at least I don't think it is. Look at the left of the cube, where the left side is rendered: it's black. Why?

 

Oh, and belfegor, you were right about the matrices!

 

How I now write my depth and normal maps for the SSAO:

VS:

output.position = mul(position, worldMatrix);
output.position = mul(output.position, viewMatrix);
output.position = mul(output.position, projectionMatrix);

output.NormalW = mul(float4(-normal.xyz,0), mul(worldMatrix, viewMatrix));

// Store the position value in a second input value for depth value calculations.
output.depthPosition = output.position;
PS:

// Depth
float depth = 1.0f - (input.depthPosition.z / input.depthPosition.w);
output.Depth = float4(depth, depth, depth, 1.0f);

// Normals
output.Normals = float4(input.NormalW, 1); 

Now, is this the correct way to write the depth and normal maps?


I said to output the view-space position, so it is like this:

VS
output.depthPosition.xyz = mul(float4(position.xyz,1), mul(worldMatrix, viewMatrix)).xyz;
PS
output.Depth = float4(input.depthPosition.xyz, 1.0f);

Although you must have a proper render target (or render target "view", whatever they call it in DX11) with a format that can hold at least 3 floating-point components.

 

And why do you invert your normals now?

output.NormalW = mul(float4(-normal.xyz,0), mul(worldMatrix, viewMatrix));

Then, when you fix those issues, play with the SSAO "sampling radius" to match your world scale (in case you don't get the expected results).
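
Putting those two points together with the earlier vertex shader, the G-buffer pass might end up looking roughly like this (a sketch reusing the matrix and output names from the snippets above; the cbuffer, struct declarations, registers and semantics are assumptions added only so the fragment is self-contained, and row-major packing with vector-on-the-left mul is assumed as discussed earlier):

// Assumed constant buffer holding the matrices used in the snippets above.
cbuffer MatrixBuffer : register(b0)
{
	matrix worldMatrix;
	matrix viewMatrix;
	matrix projectionMatrix;
};

struct GBufferVSOut
{
	float4 position      : SV_POSITION;
	float3 depthPosition : TEXCOORD0; // view-space position for the SSAO pass
	float3 NormalW       : TEXCOORD1; // view-space normal, not inverted
};

struct GBufferPSOut
{
	float4 Depth   : SV_Target0; // needs a float render target, e.g. R16G16B16A16_FLOAT
	float4 Normals : SV_Target1;
};

GBufferVSOut GBufferVS(float4 position : POSITION, float3 normal : NORMAL)
{
	GBufferVSOut output;
	matrix worldView = mul(worldMatrix, viewMatrix);

	output.position      = mul(mul(position, worldView), projectionMatrix);
	output.depthPosition = mul(float4(position.xyz, 1.0f), worldView).xyz; // view space
	output.NormalW       = mul(float4(normal.xyz, 0.0f), worldView).xyz;   // no minus sign
	return output;
}

GBufferPSOut GBufferPS(GBufferVSOut input)
{
	GBufferPSOut output;
	output.Depth   = float4(input.depthPosition, 1.0f);
	output.Normals = float4(input.NormalW, 1.0f); // normalization is discussed further down
	return output;
}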

Edited by belfegor


Let me try this one with my renderer and I'll get back to you.

 

EDIT: Here it is (on and off). All parameters are the same as yours except the radius, which I set to 0.1f (10 cm in my world) since that feels more natural to me.

 

[Screenshots: oma-2013-07-18-20-47-50-.jpg / oma-2013-07-18-20-47-53-.jpg]

Edited by belfegor


I played a little more with this and made some changes that look better than before (at least to me), so you might want to try them (my snippets below are DX9-style; a DX11 version is sketched after the list):

1. I have normalized view-space normals in the texture, and I don't get why we need the scale-bias here. Also, this fixes some ugly artifacts.

float3 getNormal(in float2 uv)
{
    //return normalize(tex2D(gbNormal_samp, uv).xyz * 2.0f - 1.0f);
    return tex2D(gbNormal_samp, uv).xyz;
}

So you might need to change your G-buffer normal output to be normalized as well:

output.Normals = float4(normalize(input.NormalW), 1);

 

2. I had this getRandom before, which looks nice to me:

float3 getRandom(in float2 uv)
{
    return tex2D(rand_samp, uv * ScreenParams.xy / 4.0f).xyz * 2.0f - 1.0f;
}

3. Setting scale to 0 looks better for me, so I can even remove some calculations:

float doAmbientOcclusion(in float2 tcoord,in float2 uv, in float3 p, in float3 cnorm)
{
    float g_scale = SSAO_params.x;
    float g_bias = SSAO_params.y;
    float g_intensity = SSAO_params.z;

    float3 diff = getPosition(tcoord + uv) - p;
    const float3 v = normalize(diff);
    //const float d = length(diff)*g_scale;
    return max(0.0,dot(cnorm,v)-g_bias)*g_intensity;//(1.0/(1.0+d))*g_intensity;
}

4. I get strange "haloing" artifacts at the screen corners, so I set the position and normal texture UV addressing mode to mirror, and that seems to fix the issue.

 

5. Changed the radius to 0.15 and the intensity to ~2.5.
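
Translated to the DX11-style shader at the top of the thread, those helpers might look roughly like this (a sketch reusing the t_normalmap / t_random / ss / g_bias / g_intensity declarations from the original shader; ScreenParams is an assumed new constant for the render-target size in pixels, not something that exists in the posted code):

// Assumed new constant: render-target size in pixels (could live in SSAOBuffer).
float2 ScreenParams;

float3 getNormal(in float2 uv)
{
	// Normals are stored already normalized in a float G-buffer, so no *2-1 scale-bias.
	return t_normalmap.Sample(ss, uv).xyz;
}

float2 getRandom(in float2 uv)
{
	// Tile the random texture over the screen; the /4.0f matches the tiling in the
	// DX9 snippet above. Kept as float2 to match 'float2 rand' in PShader.
	return t_random.Sample(ss, uv * ScreenParams.xy / 4.0f).xy * 2.0f - 1.0f;
}

float doAmbientOcclusion(in float2 tcoord, in float2 uv, in float3 p, in float3 cnorm)
{
	float3 diff = getPosition(tcoord + uv) - p;
	const float3 v = normalize(diff);
	// With g_scale treated as 0, the 1/(1+d) distance falloff drops out entirely.
	return max(0.0, dot(cnorm, v) - g_bias) * g_intensity;
}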

Edited by belfegor


The reason for the false occlusion at the corners is simply that there is nothing in the scene there:

 

[Screenshot: 2db9ok1.png]

 

But as you can see at the bottom, when I get too close to the mesh these false occlusions start to appear, and when I'm really close they're everywhere!
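
One common way to handle both problems (offered here only as an assumption, not something posted in this thread) is to skip samples that land on empty background and to fade out samples that fall far outside the sampling radius, which is what blows up when the camera gets very close. A sketch, assuming the G-buffer position target is cleared to zero:

float doAmbientOcclusion(in float2 tcoord, in float2 uv, in float3 p, in float3 cnorm)
{
	float3 samplePos = getPosition(tcoord + uv);

	// If the G-buffer was cleared to zero, a near-zero position means "no geometry here";
	// skip such samples so empty background doesn't contribute occlusion.
	if (dot(samplePos, samplePos) < 1e-6f)
		return 0.0f;

	float3 diff = samplePos - p;
	const float3 v = normalize(diff);
	const float d = length(diff) * g_scale;

	// Range check: samples much farther away than the sampling radius fade out,
	// which tames the huge screen-space radius when the camera is very close.
	float rangeCheck = smoothstep(0.0f, 1.0f, g_sample_rad / max(length(diff), 1e-4f));

	return max(0.0, dot(cnorm, v) - g_bias) * (1.0 / (1.0 + d)) * g_intensity * rangeCheck;
}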
