Hello
I have an issue in my deferred renderer (Dx9).
As seen in the attached image between my meshes there is a small black line.
This line becomes visible depending on the distance between the camera and the meshes.
The problem seems to be in the point light shader.
Since if i remove the light output and just draw color on the screenquad everything looks fine.
GBuffer:
VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
VertexShaderOutput output;
float4 worldPos = mul( float4(input.Position.xyz, 1.0f), World );
output.Position = mul( worldPos, ViewProjection );
output.UV = input.UV;
output.Normal = mul( input.Normal, World );
output.Depth.x = output.Position.z;
output.Depth.y = output.Position.w;
return output;
}
PixelShaderOutput PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
PixelShaderOutput output;
output.Color = tex2D( DiffuseMapSampler, input.UV );
output.Normal.xyz = ( input.Normal + 1.0f ) * 0.5f;
output.Normal.a = 1.0f;
float D = input.Depth.x / input.Depth.y;
output.Depth = float4( D, D, D, 1.0f );
output.Glow = tex2D( GlowMapSampler, input.UV );
return output;
}
Point Light:
VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
VertexShaderOutput output;
float4 worldPos = mul( float4(input.Position.xyz, 1.0f), World );
output.Position = mul( worldPos, ViewProjection );
output.ScreenPosition = output.Position;
return output;
}
float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
//obtain screen position
input.ScreenPosition.xy /= input.ScreenPosition.w;
//obtain textureCoordinates corresponding to the current pixel
//the screen coordinates are in [-1,1]*[1,-1]
//the texture coordinates need to be in [0,1]*[0,1]
float2 uv = 0.5f * ( float2(input.ScreenPosition.x,-input.ScreenPosition.y) + 1.0f );
//allign texels to pixels
uv += HalfPixel;
//get normal data from the normalMap
float4 normalData = tex2D( NormalMapSampler, uv );
//tranform normal back into [-1,1] range
float3 normal = normalize( 2.0f * normalData.xyz - 1.0f );
//read depth
float depth = tex2D( DepthMapSampler, uv ).r;
//compute screen-space position
float4 position;
position.xy = input.ScreenPosition.xy;
position.z = depth;
position.w = 1.0f;
//transform to world space
position = mul( position, InversedViewProjection );
position /= position.w;
//surface-to-light vector
float3 lightVector = LightLocation - position;
//compute attenuation based on distance - linear attenuation
float attenuation = saturate( 1.0f - (length(lightVector) / LightRadius) );
//normalize light vector
lightVector = normalize( lightVector );
//compute diffuse light
float NdL = saturate( dot(normal, lightVector) );
float3 diffuseLight = NdL * Color.rgb * Intensity;
return float4( diffuseLight, 1.0f ) * attenuation;
}
Any help is much appreciated!