Need a little help with my Atmospheric Scatter

5 comments, last by niyoko 10 years, 4 months ago

Hello,

I've been trying to make an atmospheric scatter shader with DirectX11 by following an old post on this site:
http://www.gamedev.net/topic/621187-sean-oneils-atmospheric-scattering/

I've had some luck with the sky from space, and I feel like I'm getting closer to getting things right, though I'm at a loss as to what I'm doing wrong at the moment.

The rasterizer is set up to draw the surface using back face culling, and the atmosphere using front face culling:

Back Face:

// Setup the rasterizer description
	// The rasterizer description determines how and what polygons are drawn
	rasterBackCullDesc.AntialiasedLineEnable = false;
	rasterBackCullDesc.CullMode = D3D11_CULL_BACK;
	rasterBackCullDesc.DepthBias = 0;
	rasterBackCullDesc.DepthBiasClamp = 0.0f;
	rasterBackCullDesc.DepthClipEnable = true;
	rasterBackCullDesc.FillMode = D3D11_FILL_SOLID;
	rasterBackCullDesc.FrontCounterClockwise = false;
	rasterBackCullDesc.MultisampleEnable = false;
	rasterBackCullDesc.ScissorEnable = false;
	rasterBackCullDesc.SlopeScaledDepthBias = 0.0f;

	// Create the rasterizer state from the description
	result = m_device->CreateRasterizerState(&rasterBackCullDesc, &m_rasterBackCullState);
	if(FAILED(result))
	{
		return false;
	}

Front Face:

// Setup the rasterizer description for Front Face Culling
	rasterFrontCullDesc.AntialiasedLineEnable = false;
	rasterFrontCullDesc.CullMode = D3D11_CULL_FRONT;
	rasterFrontCullDesc.DepthBias = 0;
	rasterFrontCullDesc.DepthBiasClamp = 0.0f;
	rasterFrontCullDesc.DepthClipEnable = true;
	rasterFrontCullDesc.FillMode = D3D11_FILL_SOLID;
	rasterFrontCullDesc.FrontCounterClockwise = false;
	rasterFrontCullDesc.MultisampleEnable = false;
	rasterFrontCullDesc.ScissorEnable = false;
	rasterFrontCullDesc.SlopeScaledDepthBias = 0.0f;

	// Create the rasterizer state from the description
	result = m_device->CreateRasterizerState(&rasterFrontCullDesc, &m_rasterFrontCullState);
	if (FAILED(result))
	{
		return false;
	}
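
This is roughly how the two states get used each frame; a trimmed sketch of my render loop (m_deviceContext, RenderSurface and RenderAtmosphere are placeholders for my actual names):

// Bind the back-face-culling state and draw the planet surface
m_deviceContext->RSSetState(m_rasterBackCullState);
RenderSurface(m_deviceContext);

// Bind the front-face-culling state and draw the atmosphere shell from inside
m_deviceContext->RSSetState(m_rasterFrontCullState);
RenderAtmosphere(m_deviceContext);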

My camera's initial position is:

// Initialize the Camera object
m_Camera->SetPosition(0.0f, 0.0f, -40.0f);

My light source's position is (0,0,0).
The direction of the light is set up as:

// Initialize the Light object 
m_Light->SetDirection(0.5f, 0.5f, -0.5f); 

On each draw frame I compute the data needed for the shader:


// Atmosphere Data
	float km = 0.0025f;
	float kr = 0.0015f;
	float eSun = 15.0f;

	D3DXVECTOR3 invWaveLength = D3DXVECTOR3(
		(1.0f / pow(0.650f, 4.0f)),
		(1.0f / pow(0.570f, 4.0f)),
		(1.0f / pow(0.475f, 4.0f)));
	float outerRadius = 10.25f;
	float outerRadius2 = outerRadius * outerRadius;
	float innerRadius = 10.0f;
	float innerRadius2 = innerRadius * innerRadius;
	float krESun = kr * eSun;
	float kmESun = km * eSun;
	float kr4Pi = kr * 4.0f * M_PI;
	float km4Pi = km * 4.0f * M_PI;
	float scaleDepth = 0.25f;
	float invScaleDepth = 1.0f / scaleDepth;
	float scale = 1.0f / (outerRadius - innerRadius);
	float scaleOverScaleDepth = scale / scaleDepth;
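	// Note: despite the name, lightPosition below is just the normalized light
	// direction; the shader only ever uses it as a direction vector.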
	D3DXVECTOR3 lightPosition;
	D3DXVec3Normalize(&lightPosition, &lightDirection);
	D3DXVECTOR3 cameraPosNorm;
	D3DXVec3Normalize(&cameraPosNorm, &cameraPosition);
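	// cameraHeight is the camera's distance from the planet centre; the planet is
	// assumed to sit at the origin, which is what the shader's sphere intersection expects.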
	float cameraHeight = D3DXVec3Length(&cameraPosition);
	float cameraHeight2 = cameraHeight * cameraHeight;

	// Get a pointer to the data in the constant buffer
	lightBuffer = (LightBufferType*)mappedResource.pData;

	// Copy the lighting variables into the constant buffer
	lightBuffer->ambientColor = ambientColor;
	lightBuffer->diffuseColor = diffuseColor;
	lightBuffer->lightDirection = lightDirection;
	lightBuffer->padding = 0.0f;
	lightBuffer->padding2 = 0.0f;
	lightBuffer->cameraPosition = cameraPosition;
	lightBuffer->lightPosition = lightPosition;
	lightBuffer->cameraHeight = cameraHeight;
	lightBuffer->cameraHeight2 = cameraHeight2;
	lightBuffer->invWaveLength = invWaveLength;
	lightBuffer->outerRadius = outerRadius;
	lightBuffer->outerRadius2 = outerRadius2;
	lightBuffer->innerRadius = innerRadius;
	lightBuffer->innerRadius2 = innerRadius2;
	lightBuffer->krESun = krESun;
	lightBuffer->kmESun = kmESun;
	lightBuffer->kr4Pi = kr4Pi;
	lightBuffer->km4Pi = km4Pi;
	lightBuffer->scaleDepth = scaleDepth;
	lightBuffer->invScaleDepth = invScaleDepth;
	lightBuffer->scale = scale;
	lightBuffer->scaleOverScaleDepth = scaleOverScaleDepth;
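
For reference, the LightBufferType struct on the C++ side has to match the HLSL cbuffer's 16-byte packing rules (a float3 followed by a float shares a register). A trimmed-down sketch of the idea, not my full struct:

// Sketch only: members must appear in the same order and packing as the cbuffer
// in the pixel shader, otherwise the shader reads garbage.
struct LightBufferType
{
	D3DXVECTOR4 ambientColor;    // 16-byte slot 0
	D3DXVECTOR4 diffuseColor;    // 16-byte slot 1
	D3DXVECTOR3 lightDirection;  // 16-byte slot 2 (xyz)
	float       padding;         // 16-byte slot 2 (w)
	// ...and so on, mirroring the rest of the cbuffer members
};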

My shader code for the sky from space shader:

////////////////////////////////////////////////////////////////////////////////
// Filename: atmosphereic.ps
////////////////////////////////////////////////////////////////////////////////

/////////////
// GLOBALS //
/////////////
cbuffer LightBuffer
{
	float4 ambientColor;
	float4 diffuseColor;
	float3 lightDirection;
	float padding;
	float padding2;
	float3 cameraPosition; // The camera's current position
	float3 lightPosition; // The direction vector to the light source
	float3 invWaveLength; // 1 / pow(wavelength, 4) for the red, green, and blue channels
	float cameraHeight; // The camera's current height
	float cameraHeight2; // fCameraHeight^2
	float outerRadius; // The outer (atmosphere) radius
	float outerRadius2; // fOuterRadius^2
	float innerRadius; // The inner (planetary) radius
	float innerRadius2; // fInnerRadius^2
	float krESun; // Kr * ESun
	float kmESun; // Km * ESun
	float kr4Pi; // Kr * 4 * PI
	float km4Pi; // Km * 4 * PI
	float scaleDepth; // The scale depth (the altitude at which the average atmospheric density is found)
	float invScaleDepth; // 1 / fScaleDepth
	float scale; // 1 / (fOuterRadius - fInnerRadius)
	float scaleOverScaleDepth; // fScale / fScaleDepth
};

//////////////
// TYPEDEFS //
//////////////
struct PixelInputType
{
	float4 position : SV_POSITION;
	float2 tex : TEXCOORD0;
	float3 positionWS : TEXCOORD1;
	float3 normal : NORMAL;
};

//////////////////
// SAMPLE TOTAL //
//////////////////
static const int nSamples = 4;
static const float fSamples = 4.0;

////////////////////////////////
// MIE PHASE ASYMMETRY FACTOR //
////////////////////////////////
static const float g = -0.98;
//static const float g2 = g*g;

////////////////////
// LOCALS METHODS //
////////////////////
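// Note: if I remember Sean O'Neil's GPU Gems 2 chapter correctly, the polynomial in
// getScaleFromCos() is a curve fit that is only valid for scaleDepth = 0.25 and an
// atmosphere about 2.5% thicker than the planet, which matches the 10.0 / 10.25 radii
// set on the CPU side.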
float getScaleFromCos(float cos)
{
	float x = 1.0 - cos;
	return scaleDepth * exp(-0.00287 + x * (0.459 + x * (3.83 + x * (-6.80 + x * 5.25))));
}

float getMiePhase(float cos, float cos2, float g, float g2)
{
	return 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + cos2) / pow(abs(1.0 + g2 - 2.0 * g * cos), 1.5);
}

float getRayleighPhase(float cos2)
{
	return 0.75 + 0.75 * cos2;
}

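// Ray/sphere intersection for a sphere of radius r centered at the origin:
// substituting p + t*d (with |d| = 1) into dot(x, x) = r*r gives
//   t^2 + 2*dot(p, d)*t + (dot(p, p) - r*r) = 0,
// so B = 2*dot(p, d), C = dot(p, p) - r*r, and the nearer hit is 0.5 * (-B - sqrt(B*B - 4*C)).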
float getNearIntersection(float3 position, float3 ray, float distance2, float radius2)
{
	float B = 2.0 * dot(position, ray);
	float C = distance2 - radius2;
	float det = max(0.0, B * B - 4.0 * C);

	return 0.5 * (-B - sqrt(det));
}

////////////////////////////////////////////////////////////////////////////////
// Pixel Shader
////////////////////////////////////////////////////////////////////////////////
float4 AtmosphericPixelShader(PixelInputType input) : SV_TARGET
{
	// Get the ray from the camera to the vertex and its length
	// This is the far point of the ray passing through the atmosphere
	float3 position = input.position - input.positionWS;
	float3 ray = position - cameraPosition;

	position = normalize(position);

	float far = length(ray);
	ray /= far;

	// Calculate the closest intersection of the ray with the outer atmosphere,
	// which is the near point of the ray passing through the atmosphere
	float near = getNearIntersection(cameraPosition, ray, cameraHeight2, outerRadius2);

	// Calculate the ray's starting position, then calculate its scattering offset
	float3 start = cameraPosition + ray * near;

	far -= near;

	float startAngle = dot(ray, start) / outerRadius;
	float startDepth = exp(-invScaleDepth);
	float startOffset = startDepth * getScaleFromCos(startAngle);

	// Initialize the scattering loop variables
	float sampleLength = far / fSamples;
	float scaledLength = sampleLength * scale;
	float3 sampleRay = ray * sampleLength;
	float3 samplePoint = start + sampleRay * 0.5;

	// Now loop through the sample rays
	float3 frontColor = float3(0.0, 0.0, 0.0);
	float3 attenuate;

	for(int i = 0; i < nSamples; i++)
	{
		float height = length(samplePoint);
		float depth = exp(scaleOverScaleDepth * (innerRadius - height));
		float lightAngle = dot(lightPosition, samplePoint) / height;
		float cameraAngle = dot(ray, samplePoint) / height;
		float scatter = (startOffset + depth * (getScaleFromCos(lightAngle) - getScaleFromCos(cameraAngle)));
		attenuate = exp(-scatter * (invWaveLength * kr4Pi + km4Pi));
		frontColor += attenuate * (depth * scaledLength);
		samplePoint += sampleRay;
	}

	float3 c0 = frontColor * (invWaveLength * krESun);
	float3 c1 = frontColor * kmESun;
	float3 direction = cameraPosition - position;
	float cos = dot(lightPosition, direction) / length(direction);
	float cos2 = cos * cos;
	float3 color = getRayleighPhase(cos2) * c0 + getMiePhase(cos, cos2, g, g*g) * c1;
	return float4(color, color.b);
}

Here is a photo from the default camera position:

[attachment=18963:atmoshpereFromSpace_01.png]

Here is another one as I just move closer to the planet.

[attachment=18964:atmoshpereFromSpace_02.png]

Another photo from a closer angle and a little above the default position:

[attachment=18965:atmoshpereFromSpace_03.png]


I'm also trying to write a ground shader, with little luck, but that is next.
It feels like I have my camera or light position/direction wrong. I can see a little of the effect in the atmosphere, but the data I'm giving it might be wrong. If anyone can see where I've gone wrong or has some advice, it would be greatly appreciated.

Thanks.


I forgot to include my vertex shader as well:


////////////////////////////////////////////////////////////////////////////////
// Filename: atmosphereic.vs
////////////////////////////////////////////////////////////////////////////////

/////////////
// GLOBALS //
/////////////
cbuffer MatrixBuffer
{
	matrix worldMatrix;
	matrix viewMatrix;
	matrix projectionMatrix;
};

//////////////
// TYPEDEFS //
//////////////
struct VertexInputType
{
	float4 position : POSITION;
	float2 tex : TEXCOORD0;
	float3 normal : NORMAL;
};

struct PixelInputType
{
	float4 position : SV_POSITION;
	float2 tex : TEXCOORD0;
	float3 positionWS : TEXCOORD1;
	float3 normal : NORMAL;
};

////////////////////////////////////////////////////////////////////////////////
// Vertex Shader
////////////////////////////////////////////////////////////////////////////////
PixelInputType AtmosphericVertexShader(VertexInputType input)
{
	PixelInputType output;

	output.positionWS = mul(input.position.xyz, (float3x3)worldMatrix);
	output.positionWS = normalize(output.positionWS);

	// Normalize the normal vector
	//output.positionWS = normalize(output.positionWS);

	// Change the position vector to be 4 units for proper matrix calculations
	input.position.w = 1.0f;

	// Calculate the position of the vertex against the world, view, and projection matrices
	output.position = mul(input.position, worldMatrix);
	output.position = mul(output.position, viewMatrix);
	output.position = mul(output.position, projectionMatrix);

	// Store the texture coordinates for the pixel shader
	output.tex = input.tex;

	// Calculate the normal vector against the world matrix only
	output.normal = mul(input.normal, (float3x3)worldMatrix);

	// Normalize the normal vector
	output.normal = normalize(output.normal);

	return output;
}

I've been playing around with the PixelInputType.positionWS and the cameraPosition to see how changing them affects the visual output. In previous tests and now, I'm freely changing the camera position, and I'm curious to see how normalizing the position affects the atmosphere.

Haven't looked at everything, and unless I'm misunderstanding your intent, these two bits caused me to raise my eyebrows a bit:

From your vertex shader:


output.positionWS = mul(input.position.xyz, (float3x3)worldMatrix);
output.positionWS = normalize(output.positionWS);

From your pixel shader:


// Get the ray from the camera to the vertex and its length
// This is the far point of the ray passing through the atmosphere
float3 position = input.position - input.positionWS;
float3 ray = position - cameraPosition;

First, I'm assuming positionWS is supposed to be the vertex's worldspace position. If that's true, then you should be multiplying float4(input.position.xyz, 1) by worldMatrix. Multiplying input.position.xyz by the upper 3x3 of worldMatrix doesn't really make sense: the translation encoded in worldMatrix won't get applied if you just multiply by the upper 3x3. Also, normalizing the result really doesn't make sense. Since position is a point, you typically don't want to normalize it, as normalization is an operation usually reserved for vectors that represent directions (like normals, rays, etc).
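
To make the point/direction distinction concrete, this is the pattern I'd expect (just an illustration, not your exact code):

// Points get the full 4x4 world matrix so the translation is applied;
// directions (normals, rays) get the upper 3x3 and are normalized afterwards.
float3 worldPos    = mul(float4(input.position.xyz, 1.0f), worldMatrix).xyz;
float3 worldNormal = normalize(mul(input.normal, (float3x3)worldMatrix));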

Second, in the pixel shader, input.position probably isn't what you think it is. The SV_Position semantic in a pixel shader is in viewport space (its x ranges from TopLeftX to TopLeftX + Width, its y from TopLeftY to TopLeftY + Height, and its z from MinDepth to MaxDepth).

If you want to calculate the ray from the camera to the point, you should set the ray like so:


// VS:
output.positionWS = mul(float4(input.position.xyz, 1), worldMatrix);

// PS:
float3 ray = input.positionWS.xyz - cameraPosition.xyz;

There may be more, but those were the things I noticed right off.

Samith,

Thanks for looking at things. You were right about what I was trying to do with positionWS. I recalled that the position is altered when going from the vertex to the pixel shader, though I seemed to forget about that when doing the operation in the pixel shader. I had thought that using the upper 3x3 of the matrix was the right thing to do, but after reading what you typed, I see that in this situation it wasn't what I wanted to do.

I changed a bit to reflect the mistake that you caught:

// VS:
output.positionWS = mul(float4(input.position.xyz, 1), worldMatrix);

//PS:
float3 position = input.positionWS.xyz;
float3 ray = position - cameraPosition.xyz;

[attachment=18984:atmoshpereFromSpace_04.png]

I'm getting a little more color towards blue. I wonder if my color function is off a bit. Thanks again for spotting one of my probably many mistakes, but it helped me out. I'm going to keep working at getting this fixed.

I took a little break from the code and came back to it today. I still felt like there was something up with my positioning related to my camera or light. The light positioning is not so much about the position as it is about the direction the light is emitting from, so it is normalized. I think I have that part right.

The next parts I'm suspicious about have been the camera position and camera height. After some advice from the previous poster and some thought, I'm pretty sure that the camera position does not need to be normalized. That leaves me with camera height. In an example on this site ( http://www.gamedev.net/topic/621187-sean-oneils-atmospheric-scattering/ ) the poster used a Vec3f function called length(). I'm pretty sure that Vec3f is not standard DirectX, so I found D3DXVec3Length() and used that to find the cameraHeight.

float cameraHeight = D3DXVec3Length(&cameraPosition);
float cameraHeight2 = cameraHeight * cameraHeight;

I don't know if I'm doing that right, but I decided to move my calculation of the camera height / altitude into my pixel shader.

float fCameraHeight = length(cameraPosition.xyz);
float fCameraHeight2 = fCameraHeight * fCameraHeight;

After doing that I've gotten what looks to be a more accurate positioning of the outer atmosphere than I had before.

[attachment=19065:atmoshpereFromSpace_06.png]

I'm still trying to work out why the atmosphere is solid white. I have a feeling that there is something up in my color calculation.

float4 AtmosphericPixelShader(PixelInputType input) : SV_TARGET
{
	float fCameraHeight = length(cameraPosition.xyz);
	float fCameraHeight2 = fCameraHeight * fCameraHeight;

	// Get the ray from the camera to the vertex and its length
	// This is the far point of the ray passing through the atmosphere
	float3 position = input.positionWS;
	float3 ray = input.positionWS - cameraPosition;

	float far = length(ray);
	ray /= far;

	// Calculate the closest intersection of the ray with the outer atmosphere,
	// which is the near point of the ray passing through the atmosphere
	float near = getNearIntersection(cameraPosition, ray, fCameraHeight2, outerRadius2);

	// Calculate the ray's starting position, then calculate its scattering offset
	float3 start = cameraPosition + ray * near;

	far -= near;

	float startAngle = dot(ray, start) / outerRadius;
	//float startDepth = exp(scaleOverScaleDepth * (innerRadius - fCameraHeight));
	float startDepth = exp(-invScaleDepth);
	float startOffset = startDepth * getScaleFromCos(startAngle);

	// Initialize the scattering loop variables
	float sampleLength = far / fSamples;
	float scaledLength = sampleLength * scale;
	float3 sampleRay = ray * sampleLength;
	float3 samplePoint = start + sampleRay * 0.5;

	// Now loop through the sample rays
	float3 frontColor = float3(0.0, 0.0, 0.0);

	for(int i = 0; i < 2; i++)
	{
		float height = length(samplePoint);
		float depth = exp(scaleOverScaleDepth * (innerRadius - height));
		float lightAngle = dot(lightPosition, samplePoint) / height;
		float cameraAngle = dot(-ray, samplePoint) / height;
		float scatter = (startOffset + depth * (getScaleFromCos(lightAngle) - getScaleFromCos(cameraAngle)));
		float3 attenuate = exp(-scatter * (invWaveLength.xyz * kr4Pi + km4Pi));
		frontColor += attenuate * (depth * scaledLength);
		samplePoint += sampleRay;
	}

	float3 c0 = frontColor * (invWaveLength * krESun);
	float3 c1 = frontColor * kmESun;
	float3 direction = cameraPosition - position;
	float cos = saturate(dot(lightPosition, direction) / length(direction));
	float cos2 = cos * cos;
	float3 color = getRayleighPhase(cos2) * c0 + getMiePhase(cos, cos2, g, g * g) * c1;
	return float4(color, color.b);
}

Hello, I'm busy at the moment, but maybe tomorrow I can post all the HLSL files for atmospheric scattering.

Montify, thank you for offering to do that to help me out. I've been busy as well and have had to pause work on the shader. I worked on it earlier this week and got better results and color gradation, but the outer atmosphere still was not rendering accurately: it was going from white to blue to purple to black. I can take some screenshots and post them a bit later.

This topic is closed to new replies.
