• Create Account

## Sean O'Neil's atmospheric scattering

Old topic!

Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.

14 replies to this topic

### #1Hyunkel  Members

401
Like
0Likes
Like

Posted 01 March 2012 - 06:32 PM

I'm trying to implement Sean O'Neil's atmospheric scattering as explained here:
http://http.develope..._chapter16.html

My final goal is to combine his scattering shaders into a single shader that operates in screen space, without the need of additional geometry (such as a sphere for the atmosphere).
I think this shouldn't be too hard to do in a deferred renderer, where I can easily calculate the surface position of the atmosphere sphere.

Anyways, before I can tackle this task, I first need a working implementation of Sean O'Neil's shaders.
Translating the shader into hlsl (or rather a dx effect) was rather straightforward:

// The number of sample points taken along the ray
static const int nSamples = 2;
static const float fSamples = (float)nSamples;
// Gravity
static const float g  = -0.98f;
static const float g2 =  0.9604f;
float4x4 WorldViewProjection;
float3 v3CameraPos;   // The camera's current position
float3 v3LightPos;   // The direction vector to the light source
float3 v3InvWavelength;  // 1 / pow(wavelength, 4) for the red, green, and blue channels
float fCameraHeight;  // The camera's current height
float fCameraHeight2;  // fCameraHeight^2
float fKrESun;	// Kr * ESun
float fKmESun;	// Km * ESun
float fKr4PI;	// Kr * 4 * PI
float fKm4PI;	// Km * 4 * PI
float fScaleDepth;   // The scale depth (the altitude at which the average atmospheric density is found)
float fInvScaleDepth;  // 1 / fScaleDepth
float fScaleOverScaleDepth; // fScale / fScaleDepth
// The scale equation calculated by Vernier's Graphical Analysis
// Approximates the atmospheric optical-depth integral for a ray leaving a
// point at the scale-height altitude, as a function of the ray's angle to
// the local zenith (fCos = cosine of that angle).
// NOTE(review): this polynomial is a curve fit — it is only valid for the
// scale depth it was fitted against (0.25 in the GPU Gems sample); other
// fScaleDepth values need a refit. TODO confirm against O'Neil's source.
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
// Calculates the Mie phase function
// Mie phase function (O'Neil's Henyey-Greenstein-style variant).
// fCos / fCos2: cosine of the scattering angle and its square.
// g / g2: asymmetry factor and its square (passed separately to avoid
// recomputing g*g per pixel).
float getMiePhase(float fCos, float fCos2, float g, float g2)
{
    float fNumerator = 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos2);
    // abs() keeps the pow() base non-negative; pow with a negative base is
    // undefined in HLSL.
    float fDenominator = pow(abs(1.0 + g2 - 2.0*g*fCos), 1.5);
    return fNumerator / fDenominator;
}
// Calculates the Rayleigh phase function
// Rayleigh phase function: 3/4 * (1 + cos^2(theta)).
// Takes the squared cosine so callers can share it with getMiePhase.
float getRayleighPhase(float fCos2)
{
    // Returning a constant 1.0 here disables the phase term (useful for
    // debugging the in-scatter integral in isolation).
    float fPhase = 0.75 + 0.75*fCos2;
    return fPhase;
}
// Returns the near intersection point of a line and a sphere
// Nearer root of the ray/sphere intersection. Solves t^2 + B*t + C = 0,
// which assumes v3Ray is normalized (unit t^2 coefficient) and the sphere
// is centred at the origin. fDistance2 = |v3Pos|^2, fRadius2 = radius^2.
float getNearIntersection(float3 v3Pos, float3 v3Ray, float fDistance2, float fRadius2)
{
float B = 2.0 * dot(v3Pos, v3Ray);
float C = fDistance2 - fRadius2;
// Clamp the discriminant so grazing/missing rays yield 0 instead of NaN.
float fDet = max(0.0, B*B - 4.0 * C);
return 0.5 * (-B - sqrt(fDet));
}
// Returns the far intersection point of a line and a sphere
// Far root of the ray/sphere intersection: same quadratic as
// getNearIntersection (unit-length v3Ray, origin-centred sphere), but
// returns the larger root, i.e. where the ray exits the sphere.
float getFarIntersection(float3 v3Pos, float3 v3Ray, float fDistance2, float fRadius2)
{
    float fB = 2.0 * dot(v3Pos, v3Ray);
    float fC = fDistance2 - fRadius2;
    // max() guards against a slightly negative discriminant from grazing rays.
    float fDiscriminant = max(0.0, fB*fB - 4.0 * fC);
    return 0.5 * (sqrt(fDiscriminant) + -fB);
}
struct VS_IN
{
float3 Position  : POSITION0;
};
struct PS_IN
{
float4 Position : SV_POSITION;
float3 PositionWS : TEXCOORD0;
};
// Pass-through vertex shader: forwards the untransformed vertex position
// and projects to clip space.
// NOTE(review): PositionWS is the raw model-space position — this is only
// a world-space position if the planet is at the origin with an identity
// world transform; confirm against the host code.
PS_IN VS( VS_IN input )
{
PS_IN output = (PS_IN)0;
output.PositionWS = input.Position;
output.Position = mul(float4(input.Position, 1), WorldViewProjection);
return output;
}
// SkyFromSpace-style pixel shader: ray-marches O'Neil's single-scattering
// integral from the camera (assumed outside the atmosphere) through the
// atmosphere shell, then composites the result over a debug-red planet.
// NOTE(review): fOuterRadius2, fOuterRadius, fScale and fInnerRadius are
// used below but never declared in this listing — the effect cannot
// compile unless they are declared as constants; verify they are set.
float4 PS( PS_IN input ) : SV_Target
{
//return float4(0, 1, 0, 1);
// Get the ray from the camera to the vertex and its length (which is the far point of the ray passing through the atmosphere)
float3 v3Pos = input.PositionWS;
float3 v3Ray = v3Pos - v3CameraPos;
float fFar = length(v3Ray);
v3Ray /= fFar;
// Calculate the closest intersection of the ray with the outer atmosphere (which is the near point of the ray passing through the atmosphere)
float fNear = getNearIntersection(v3CameraPos, v3Ray, fCameraHeight2, fOuterRadius2);
// Calculate the ray's start and end positions in the atmosphere, then calculate its scattering offset
float3 v3Start = v3CameraPos + v3Ray * fNear;
fFar -= fNear;
float fStartAngle = dot(v3Ray, v3Start) / fOuterRadius;
// exp(-1/fScaleDepth): optical depth at the atmosphere boundary —
// NOTE(review): only valid while the camera is outside the atmosphere.
float fStartDepth = exp(-fInvScaleDepth);
float fStartOffset = fStartDepth*scale(fStartAngle);
// Initialize the scattering loop variables
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
float3 v3SampleRay = v3Ray * fSampleLength;
float3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays
float3 v3FrontColor = float3(0.0, 0.0, 0.0);
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
// Atmospheric density at the sample altitude (exponential falloff).
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fLightAngle = dot(v3LightPos, v3SamplePoint) / fHeight;
float fCameraAngle = dot(v3Ray, v3SamplePoint) / fHeight;
// Total optical depth along sun->sample->camera for this sample.
float fScatter = (fStartOffset + fDepth*(scale(fLightAngle) - scale(fCameraAngle)));
float3 v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepth * fScaledLength);
v3SamplePoint += v3SampleRay;
}
// Finally, scale the Mie and Rayleigh colors and set up the varying variables for the pixel shader
float3 c0 = v3FrontColor * (v3InvWavelength * fKrESun);
float3 c1 = v3FrontColor * fKmESun;
float3 v3Direction = v3CameraPos - v3Pos;
float fCos = dot(v3LightPos, v3Direction) / length(v3Direction);
float fCos2 = fCos*fCos;
float3 color = getRayleighPhase(fCos2) * c0 + getMiePhase(fCos, fCos2, g, g2) * c1;
// O'Neil's trick: use the blue channel as alpha for blending the sky.
float4 AtmoColor = float4(color, color.b);
// Solid red planet colour so the affected area is easy to see (debug aid).
float4 PlanetColor = float4(1, 0, 0, 1);
return PlanetColor + AtmoColor;
}
// Effect technique. The original pass body was empty, which binds no
// shaders when the pass is applied, so nothing (or stale state) is drawn.
// A D3D10 effect pass must bind its shader stages explicitly.
technique10 Default
{
pass P0
{
SetVertexShader(CompileShader(vs_4_0, VS()));
SetGeometryShader(NULL);
SetPixelShader(CompileShader(ps_4_0, PS()));
}
}


This is pretty much a direct copy of O'Neils shader, except I'm doing everything in the pixel shader, and I color the sphere red to better visualize where the effect is applied.

Here are the shader constants I'm using:
float Km = 0.0025f;
float Kr = 0.0015f;
float ESun = 10.0f;
Vec3f v3InvWavelength = Vec3f(
1.0f / pow(0.650f, 4),
1.0f / pow(0.570f, 4),
1.0f / pow(0.475f, 4));
float fKrESun = Kr * ESun;
float fKmESun = Km * ESun;
float fKr4PI = Kr * 4.0f * Pi;
float fKm4PI = Km * 4.0f * Pi;
float fScaleDepth = 0.25f;
float fInvScaleDepth = 1.0f / fScaleDepth;
float fScaleOverScaleDepth = fScale / fScaleDepth;
Vec3f v3LightPos = Vec3f(0.5f, 0.5f, -0.5f);
v3LightPos.normalize();
Vec3f v3CameraPos = Engine->Scene->Camera->GetPosition();
float fCameraHeight = v3CameraPos.length();
float fCameraHeight2 = v3CameraPos.length_squared();


And this is what I'm getting:

Keep in mind, I added the red color to highlight the sphere surface.
As you can see, I'm not doing so well.
I noticed that the effect isn't incorporating the whole sphere, which makes me think there's a problem with the scaling parameters here:
float fScaleDepth = 0.25f;
float fInvScaleDepth = 1.0f / fScaleDepth;
float fScaleOverScaleDepth = fScale / fScaleDepth;


This doesn't really surprise me too much because I have trouble understanding what exactly the fScaleDepth parameter is supposed to be.
I'm using 0.25f because it's what was used in the GPU Gems example.
I've played with the parameters quite a bit but I can't seem to get it right.

I'd be grateful for any suggestions.

Cheers,
Hyu

### #2Hyunkel  Members

401
Like
0Likes
Like

Posted 02 March 2012 - 05:39 PM

I had an in-depth look at the example code provided with the GPU gems article and decided to match all my settings 1:1 to what was used in the example in the hopes to get it working.
Unfortunately most problems persisted.

My rasterizer state was messed up, explaining some of the things going on the pic I posted earlier.
Even with that fixed, my Skyfromspace shader implementation is nowhere near working.

I then decided to switch to the Groundfromspace shader because I was stuck, and it seems to work rather well:

This is the GroundFromSpace shader applied to a sphere mesh with radius 10.00 (planet) with a diffuse color of (0.25, 0.25, 0.25)

on top of it I get this:

Both share the exact same shader constants.
Since the GroundFromSpace shader seems to be working rather nicely, I'm no longer convinced that the problem is related to the scaling parameters, especially since I copied the ones from Sean O'Neil exactly.
Using the new scaling factors also didn't change anything, so I'm pretty sure I had them set up right the first time around.

I tried to debug the shader as best as I could and found out that the v3FrontColor variable is definitely getting calculated incorrectly.
Unfortunately the calculations are rather complicated and I have trouble finding the issue.

Here's what I have for now:

// The number of sample points taken along the ray
static const int nSamples = 4;
static const float fSamples = 4.0f;
// Mie phase assymetry factor
static const float g  = -0.98f;
static const float g2 =  0.9604f;
float4x4 WorldViewProjection;
float3 v3CameraPos;   // The camera's current position
float3 v3LightPos;   // The direction vector to the light source
float3 v3InvWavelength;  // 1 / pow(wavelength, 4) for the red, green, and blue channels
float fCameraHeight;  // The camera's current height
float fCameraHeight2;  // fCameraHeight^2
float fKrESun;	// Kr * ESun
float fKmESun;	// Km * ESun
float fKr4PI;	// Kr * 4 * PI
float fKm4PI;	// Km * 4 * PI
float fScaleDepth;   // The scale depth (the altitude at which the average atmospheric density is found)
float fInvScaleDepth;  // 1 / fScaleDepth
float fScaleOverScaleDepth; // fScale / fScaleDepth
// The scale equation calculated by Vernier's Graphical Analysis
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
// Calculates the Mie phase function
float getMiePhase(float fCos, float fCos2, float g, float g2)
{
return 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos2) / pow(abs(1.0 + g2 - 2.0*g*fCos), 1.5);
}
// Calculates the Rayleigh phase function
float getRayleighPhase(float fCos2)
{
//return 1.0;
return 0.75 + 0.75*fCos2;
}
// Returns the near intersection point of a line and a sphere
float getNearIntersection(float3 v3Pos, float3 v3Ray, float fDistance2, float fRadius2)
{
float B = 2.0 * dot(v3Pos, v3Ray);
float C = fDistance2 - fRadius2;
float fDet = max(0.0, B*B - 4.0 * C);
return 0.5 * (-B - sqrt(fDet));
}
// Returns the far intersection point of a line and a sphere
float getFarIntersection(float3 v3Pos, float3 v3Ray, float fDistance2, float fRadius2)
{
float B = 2.0 * dot(v3Pos, v3Ray);
float C = fDistance2 - fRadius2;
float fDet = max(0.0, B*B - 4.0 * C);
return 0.5 * (-B + sqrt(fDet));
}
struct VS_IN
{
float3 Position  : POSITION0;
};
struct PS_IN
{
float4 Position : SV_POSITION;
float3 PositionWS : TEXCOORD0;
};
PS_IN VS( VS_IN input )
{
PS_IN output = (PS_IN)0;
output.PositionWS = input.Position;
output.Position = mul(float4(input.Position, 1), WorldViewProjection);
return output;
}
// GroundFromSpace pixel shader: for a camera outside the atmosphere
// looking at the surface, computes the in-scattered light (c0) and the
// attenuation factor applied to the surface colour (c1).
// NOTE(review): fOuterRadius2, fOuterRadius, fInnerRadius, fScale and
// fInvScaleDepth must be declared as effect constants elsewhere.
float4 PS_GroundFromSpace( PS_IN input ) : SV_Target
{
// Ray from the camera to the shaded surface point; fFar = its length.
float3 v3Pos = input.PositionWS;
float3 v3Ray = v3Pos - v3CameraPos;
v3Pos = normalize(v3Pos);
float fFar = length(v3Ray);
v3Ray /= fFar;
// Where the ray first enters the outer atmosphere shell.
float fNear = getNearIntersection(v3CameraPos, v3Ray, fCameraHeight2, fOuterRadius2);
// March from the atmosphere entry point down to the surface.
float3 v3Start = v3CameraPos + v3Ray * fNear;
fFar -= fNear;
// BUG FIX: fDepth was read below without ever being initialized. O'Neil's
// GroundFromSpace shader first computes the camera-side optical depth at
// the atmosphere boundary (the camera is in space):
//   fDepth = exp((fInnerRadius - fOuterRadius) / fScaleDepth)
float fDepth = exp((fInnerRadius - fOuterRadius) * fInvScaleDepth);
float fCameraAngle = dot(-v3Ray, v3Pos);
float fLightAngle = dot(v3LightPos, v3Pos);
float fCameraScale = scale(fCameraAngle);
float fLightScale = scale(fLightAngle);
// Camera-path optical depth term, hoisted out of the loop.
float fCameraOffset = fDepth*fCameraScale;
float fTemp = (fLightScale + fCameraScale);
// Initialize the scattering loop variables
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
float3 v3SampleRay = v3Ray * fSampleLength;
float3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays, accumulating in-scattered light.
float3 v3FrontColor = float3(0.0, 0.0, 0.0);
float3 v3Attenuate;
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
// Atmospheric density at the sample altitude (exponential falloff).
float fDepthAtSample = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fScatter = fDepthAtSample*fTemp - fCameraOffset;
v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepthAtSample * fScaledLength);
v3SamplePoint += v3SampleRay;
}
// c0: light scattered towards the camera; c1: attenuation of the last
// sample (applied to the surface colour, per O'Neil).
float3 c0 = v3FrontColor * (v3InvWavelength * fKrESun + fKmESun);
float3 c1 = v3Attenuate;
float3 PlanetColor = float3(0.25, 0.25, 0.25);
return float4(c0 + PlanetColor * c1, 1);
}
// SkyFromSpace pixel shader: ray-marches the single-scattering integral
// through the atmosphere shell for a camera outside the atmosphere and
// returns the sky colour with O'Neil's blue-channel-as-alpha trick.
// NOTE(review): fOuterRadius2, fOuterRadius, fScale and fInnerRadius are
// used below but never declared in this listing; verify they are set.
float4 PS_SkyFromSpace( PS_IN input ) : SV_Target
{
//return float4(0, 1, 0, 1);
// Get the ray from the camera to the vertex and its length (which is the far point of the ray passing through the atmosphere)
float3 v3Pos = input.PositionWS;
float3 v3Ray = v3Pos - v3CameraPos;
float fFar = length(v3Ray);
v3Ray /= fFar;
// Calculate the closest intersection of the ray with the outer atmosphere (which is the near point of the ray passing through the atmosphere)
float fNear = getNearIntersection(v3CameraPos, v3Ray, fCameraHeight2, fOuterRadius2);
// Calculate the ray's start and end positions in the atmosphere, then calculate its scattering offset
float3 v3Start = v3CameraPos + v3Ray * fNear;
fFar -= fNear;
float fStartAngle = dot(v3Ray, v3Start) / fOuterRadius;
// exp(-1/fScaleDepth): optical depth at the atmosphere boundary —
// NOTE(review): only valid while the camera is outside the atmosphere.
float fStartDepth = exp(-fInvScaleDepth);
float fStartOffset = fStartDepth*scale(fStartAngle);
// Initialize the scattering loop variables
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
float3 v3SampleRay = v3Ray * fSampleLength;
float3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays
float3 v3FrontColor = float3(0.0, 0.0, 0.0);
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
// Atmospheric density at the sample altitude (exponential falloff).
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fLightAngle = dot(v3LightPos, v3SamplePoint) / fHeight;
float fCameraAngle = dot(v3Ray, v3SamplePoint) / fHeight;
// Total optical depth along sun->sample->camera for this sample.
float fScatter = (fStartOffset + fDepth*(scale(fLightAngle) - scale(fCameraAngle)));
float3 v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepth * fScaledLength);
v3SamplePoint += v3SampleRay;
}
// Finally, scale the Mie and Rayleigh colors and set up the varying variables for the pixel shader
float3 c0 = v3FrontColor * (v3InvWavelength * fKrESun);
float3 c1 = v3FrontColor * fKmESun;
float3 v3Direction = v3CameraPos - v3Pos;
float fCos = dot(v3LightPos, v3Direction) / length(v3Direction);
float fCos2 = fCos*fCos;
float3 color = getRayleighPhase(fCos2) * c0 + getMiePhase(fCos, fCos2, g, g2) * c1;
// Blue channel doubles as alpha for blending the sky over the planet.
float4 AtmoColor = float4(color, color.b);
return AtmoColor;
}
// Effect techniques. The original passes were empty, which binds no
// shaders when a pass is applied — each D3D10 pass must bind its shader
// stages explicitly or DrawIndexed renders with stale/undefined shaders.
technique10 SkyFromSpace
{
pass P0
{
SetVertexShader(CompileShader(vs_4_0, VS()));
SetGeometryShader(NULL);
SetPixelShader(CompileShader(ps_4_0, PS_SkyFromSpace()));
}
}
technique10 GroundFromSpace
{
pass P0
{
SetVertexShader(CompileShader(vs_4_0, VS()));
SetGeometryShader(NULL);
SetPixelShader(CompileShader(ps_4_0, PS_GroundFromSpace()));
}
}


And this is how I render both spheres:
AtmosphereSphere = CMeshFactory::CreateSphere(this, 10.25f, 128, 128);
PlanetSphere = CMeshFactory::CreateSphere(this, 10.00f, 128, 128);


void CGraphicsManagerDX11::TestAtmosphere()
{
float Km = 0.0025f;
float Kr = 0.0015f;
float ESun = 10.0f;
Vec3f v3InvWavelength = Vec3f(
1.0f / pow(0.650f, 4),
1.0f / pow(0.570f, 4),
1.0f / pow(0.475f, 4));
float fKrESun = Kr * ESun;
float fKmESun = Km * ESun;
float fKr4PI = Kr * 4.0f * Pi;
float fKm4PI = Km * 4.0f * Pi;
float fScaleDepth = 0.25f;
float fInvScaleDepth = 1.0f / fScaleDepth;
float fScaleOverScaleDepth = fScale / fScaleDepth;
Vec3f v3LightPos = Vec3f(0.5f, 0.5f, -0.5f);
v3LightPos.normalize();
Vec3f v3CameraPos = Engine->Scene->Camera->GetPosition();
float fCameraHeight = v3CameraPos.length();
float fCameraHeight2 = fCameraHeight * fCameraHeight;
Context->OMSetRenderTargets(1, &BackBufferView, DepthBufferView);
Context->OMSetDepthStencilState(DepthStencilStates->Default, 0);
Context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
CEffect* Effect = EffectManager->Get(L"Scatter");
PlanetSphere->UseBuffers(Context);
Effect->GetTechniquePass("GroundFromSpace")->Apply(0, Context);
Context->DrawIndexed(PlanetSphere->IndexCount, 0, 0);
AtmosphereSphere->UseBuffers(Context);
Effect->GetTechniquePass("SkyFromSpace")->Apply(0, Context);
Context->DrawIndexed(AtmosphereSphere->IndexCount, 0, 0);
}


Any ideas where the issue could be?

Cheers,
Hyu

### #3Hyunkel  Members

401
Like
2Likes
Like

Posted 03 March 2012 - 12:43 PM

I seem to have misunderstood something rather important when reading the article, which is
that the atmosphere is supposed to be rendered behind the planet, not in front of it.
In retrospect I have no idea how I came to another conclusion.

After fixing this major issue I got much better results and have begun modifying the shaders so they can be directly applied to a gbuffer.
I currently perform all of the geometry shading in a single pass, and then
add the outer atmosphere in a second pass.
Both can be done in a single pass though, but this involves rather heavy dynamic branching.

This is where I'm currently at:

float fCameraAngle = dot(-v3Ray, v3Pos);


variable to

float fCameraAngle = 1.0f;


for ground shading though, because it would otherwise produce weird results when the camera is close to the inner radius.
I'm not really sure why that is though, but it works fine with 1.0f.

It still looks a bit odd, but that is mostly because I'm using a red planet, on whom an earth-like atmosphere feels out of place.
The sky is also quite a bit too dark. I'll have to tweak some values to get that right, but that shouldn't be too difficult.
Once I get that sorted, add some hdr, and get some work on my noise functions for the terrain generation done, it should look rather pretty.

Cheers,
Hyu

### #4swiftcoder  Senior Moderators

17805
Like
0Likes
Like

Posted 03 March 2012 - 01:33 PM

That looks pretty good. I too adapted Sean O'Neil's shaders to work in a single pass in a deferred renderer, for StarFall (albeit using GLSL).

It works rather well, though I'd really like to integrate Eric Bruneton's multiple-scattering at some point.

Tristam MacDonald - Software Engineer @ Amazon - [swiftcoding] [GitHub]

### #5Hyunkel  Members

401
Like
0Likes
Like

Posted 03 March 2012 - 02:34 PM

StarFall is looking pretty good!

Ideally I'd like to integrate Eric Bruneton's scattering as well.
In fact I tried to implement it before I adapted Sean O'Neil's shaders, but I had major trouble
understanding how to generate the 4d lookup texture.

I plan on getting back to it in the future though.

### #6melak47  Members

112
Like
0Likes
Like

Posted 04 March 2012 - 04:25 PM

I tried to implement O'Neils shaders as well, but the most I ever got was a handful of brightly colored triangles somewhere.
After I saw your post I figured I'd give the GroundFromSpace shader a try, and got it to work as well (for the most part. if the light is closer than 1 atmosphere radius, the lightScale value becomes an issue).
However, I do not get the orange-ish color around the night/day terminator.

The SkyFromSpace shader however, I can't seem to get to work at all. How do you draw the atmosphere behind the planet?
Disable depth buffer writes, draw the atmosphere, then render the planet?
Doesn't the opaque planet skip any and all alpha blending with the transparent atmosphere if it is in front of it though?

Even rewriting my SkyFromSpace shader after your code, all I get is this: (GroundFromSpace in the background):

The underlying geometry is a geosphere, so that is likely just a single vertex's color lerping to the surrounding 6 triangles.

### #7Hyunkel  Members

401
Like
0Likes
Like

Posted 04 March 2012 - 05:58 PM

Hoewver I do not get the orange-ish color around the night/day terminator.

The shaders are very sensitive to the scaling parameters.
Things go horribly wrong if one of them is off even by a small amount.
You also have to follow some rules, such as the planet center being at 0, 0, 0.
If you post the shader constants you're currently using I can check if something obvious is wrong.

How do you draw the atmosphere behind the planet?

First draw the planet as usual, then change your rasterizer state so it culls front faces instead of back faces.

Then draw the atmosphere sphere using the AtmosphereFromSpace shader.
Since your culling is inverted, only the back half of the sphere will be drawn.
Pixels behind the planet will not be drawn because they will fail the depth test.

### #8melak47  Members

112
Like
0Likes
Like

Posted 04 March 2012 - 06:20 PM

I pass the object position to the shader and use vertex, camera and light position relative to that.

   constBuffer.samples = 3;
float Kr = 0.0025f;
float Km = 0.0010f;
float ESun = 20.0f;
float pi = 3.14159265358979323846f;
constBuffer.Kr4PI = Kr * 4 * pi;
constBuffer.Km4PI = Km * 4 * pi;
constBuffer.KrESun = Kr * ESun;
constBuffer.KmESun = Km * ESun;

constBuffer.rScaleDepth = 0.25f;
constBuffer.mScaleDepth = 0.1f;

constBuffer.CamPosW = XMFLOAT4(sin(t) * 5, 5, cos(t) * 5, 0.0f);
constBuffer.LightPos = XMFLOAT4(1000, 1000, 1000, 0.0f);
constBuffer.g = -0.99f;
constBuffer.WaveLength = XMFLOAT4(0.65f, 0.57f, 0.475f, 0.0f);


A few others I calculate in the shader:
	float3 CamPos = CamPosW - objPos;
float3 LightPos = LightPosW - objPos;
float3 InvWaveLength = 1.0 / pow(WaveLength.xyz, 4);
float scaleOverScaleDepth = fscale / rScaleDepth;
float altitude = length(CamPos);

float3 pos = mul(input.Pos, World).xyz - objPos.xyz;


### #9Hyunkel  Members

401
Like
0Likes
Like

Posted 04 March 2012 - 07:35 PM

v3LightPos is actually the light direction, not the light position, even though the name suggests otherwise.
It needs to be normalized.

mScaleDepth, which I am assuming is fInvScaleDepth needs to be 1 / ScaleDepth.

for "g" I would stick to -0.98f.
It doesn't really influence anything too much besides the fake sun when rendering the sky from atmosphere though, which you aren't doing.

Apart from that I can't find any problems.

### #10melak47  Members

112
Like
0Likes
Like

Posted 04 March 2012 - 07:54 PM

v3LightPos is actually the light direction, not the light position, even though the name suggests otherwise.
It needs to be normalized.

mScaleDepth, which I am assuming is fInvScaleDepth needs to be 1 / ScaleDepth.

for "g" I would stick to -0.98f.
It doesn't really influence anything too much besides the fake sun when rendering the sky from atmosphere though, which you aren't doing.

Apart from that I can't find any problems.

Normalizing LightDir/Pos gives me more colored specks; they flicker extremely fast and are hard to capture:

In O'Neil's C++ source code there are both rayleighScaleDepth and mieScaleDepth, though all the shaders only use the Rayleigh one.
InvScaleDepth is only used in this one line, so I opted for
float startDepth = exp(-1 / rScaleDepth);


Is the orange day/night transition in your renders a result of O'Neils shader, or something that you are doing?

SkyFromSpace.fx:
cbuffer perObject
{
float4x4 Scale;
float4x4 Rotation;
float4x4 Position;
float4 objColor;
float4 objPos;
int wire;
int textured;
};
cbuffer perFrame
{
float4x4 view;
float4x4 projection;

float4 CamPosW;
float4 LightPosW;
float4 WaveLength;
float samples;
float KrESun;
float KmESun;
float Kr4PI;
float Km4PI;
float g;
float rScaleDepth;
float mScaleDepth;
};
struct appdata
{
float4 Pos	: POSITION;
float3 Normal : NORMAL;
float3 UVW   : TEXCOORD;
};
struct v2p
{
float4 Pos : SV_POSITION;
float4 Color0 : COLOR0;
float4 Color1 : COLOR1;
float3 Direction : TEXCOORD;
};

float scale(float f)
{
float x = 1.0 - f;
return rScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
// Calculates the Mie phase function
float getMiePhase(float fCos, float fCos2, float g, float g2)
{
return 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos2) / pow(abs(1.0 + g2 - 2.0*g*fCos), 1.5);
}
// Calculates the Rayleigh phase function
float getRayleighPhase(float fCos2)
{
//return 1.0;
return 0.75 + 0.75*fCos2;
}
// Returns the near intersection point of a line and a sphere
float getNearIntersection(float3 v3Pos, float3 v3Ray, float fDistance2, float fRadius2)
{
float B = 2.0 * dot(v3Pos, v3Ray);
float C = fDistance2 - fRadius2;
float fDet = max(0.0, B*B - 4.0 * C);
return 0.5 * (-B - sqrt(fDet));
}
v2p VS(appdata input)
{
//calculate prerequisites
float4x4 WVP = transpose(mul(mul(mul(mul(projection, view), Position), Rotation), Scale));
float4x4 World = transpose(mul(mul(Position,Rotation),Scale));

float3 CamPos = CamPosW - objPos;
float3 LightPos = LightPosW - objPos;
LightPos /= length(LightPos);

float3 InvWaveLength = 1.0 / pow(WaveLength.xyz, 4);
float scaleOverScaleDepth = fscale / rScaleDepth;
float altitude = length(CamPos);

float3 pos = mul(input.Pos, World).xyz - objPos.xyz;
float3 ray = pos - CamPos;
pos /= length(pos);
float far = length(ray);
ray /= far;
//initial values
float3 start = CamPos + ray * near;
far -= near;
float startAngle = dot(ray, start) / aRadius;
float startDepth = exp(-1 / rScaleDepth);
float startOffset = startDepth * scale(startAngle);
float sampleLength = far / samples;
float scaledLength = sampleLength * fscale;
float3 sampleRay = ray * sampleLength;
float3 samplePoint  = start + sampleRay * 0.5;
//loop through samples
float3 frontColor = float3(0,0,0);
for (int i=0; i<(int)samples; i++)
{
float height = length(samplePoint.xyz);
float depth = exp(scaleOverScaleDepth * (pRadius - height));
float lightAngle = dot(LightPos, samplePoint) / height;
float camAngle = dot(ray, samplePoint) / height;
float scatter = (startOffset + depth * (scale(lightAngle) - scale(camAngle)));
float3 attenuate = exp(-scatter * (InvWaveLength * Kr4PI + Km4PI));
frontColor += attenuate * (depth * scaledLength);
samplePoint += sampleRay;
}

float3 c0 = frontColor * (InvWaveLength.xyz * KrESun);
float3 c1 = frontColor * KmESun;
float3 direction = CamPos - pos;
float Cos = dot(LightPos, direction) / length(direction);

v2p output;
output.Pos = mul(input.Pos, WVP);
output.Color0 = float4(c0, Cos);
output.Color1 = float4(c1, 1);
output.Direction = float4(direction, 1);
return output;
}

// Applies the Rayleigh and Mie phase functions to the per-vertex
// scattering colours interpolated from the vertex shader.
// NOTE(review): Color0/Color1 use COLOR semantics — DirectX may clamp
// COLOR interpolators to [0, 1], losing HDR scatter values; TEXCOORD
// semantics would avoid that. Confirm on the target hardware.
// NOTE(review): 'color' is float3 assigned from a float4 expression
// (implicit truncation warning); the blue channel is reused as alpha.
float4 PS(v2p input) : SV_TARGET
{
float Cos = input.Color0.w;
float3 color = getRayleighPhase(Cos*Cos) * input.Color0 + getMiePhase(Cos, Cos*Cos, g, g*g) * input.Color1;
return float4(color, color.z);
}


And the GroundFromSpace.fx as well:
cbuffer perObject
{
float4x4 Scale;
float4x4 Rotation;
float4x4 Position;
float4 objColor;
float4 objPos;
int wire;
int textured;
};
cbuffer perFrame
{
float4x4 view;
float4x4 projection;

float4 CamPosW;
float4 LightPosW;
float4 WaveLength;
float samples;
float KrESun;
float KmESun;
float Kr4PI;
float Km4PI;
float g;
float rScaleDepth;
float mScaleDepth;
};
Texture2D objTexture;
SamplerState texSampler;

struct appdata
{
float4 Pos	: POSITION;
float3 Normal : NORMAL;
float3 UVW   : TEXCOORD;
};
struct v2p
{
float4 Pos : SV_POSITION;
float4 Color0 : COLOR0;
float4 Color1 : COLOR1;
float3 TexCoord : TEXCOORD;
};

float scale(float fCos)
{
float x = 1.0 - fCos;
return rScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
// Returns the near intersection point of a line and a sphere
float getNearIntersection(float3 v3Pos, float3 v3Ray, float fDistance2, float fRadius2)
{
float B = 2.0 * dot(v3Pos, v3Ray);
float C = fDistance2 - fRadius2;
float fDet = max(0.0, B*B - 4.0 * C);
return 0.5 * (-B - sqrt(fDet));
}

v2p VS(appdata input)
{
//calculate prerequisites
float4x4 WVP = transpose(mul(mul(mul(mul(projection, view), Position), Rotation), Scale));
float4x4 World = transpose(mul(mul(Position,Rotation),Scale));

float3 CamPos = CamPosW - objPos;
float3 LightPos = LightPosW - objPos;
float3 LightDir = LightPos / length(LightPos);
float4 InvWaveLength = 1.0 / float4(pow(WaveLength.x, 4), pow(WaveLength.y, 4), pow(WaveLength.z, 4), 0);
float scaleOverScaleDepth = fscale / rScaleDepth;
float altitude = length(CamPos);

float3 pos = mul(input.Pos, World).xyz - objPos.xyz;
float3 ray = pos - CamPos;
pos /= length(pos);
float far = length(ray);
ray /= far;
//initial values
float3 start = CamPos + ray * near;
far -= near;
float camAngle = dot(-ray, pos);
float lightAngle = dot(LightDir, pos);
float camScale = scale(camAngle);
float lightScale = scale(lightAngle);
float camOffset = depth*camScale;
float temp = lightScale + camScale;
float sampleLength = far / samples;
float scaledLength = sampleLength * fscale;
float3 sampleRay = ray * sampleLength;
float3 samplePoint  = start + sampleRay * 0.5;
//loop through samples
float3 frontColor = float3(0,0,0);
float3 attenuate;
for (int i=0; i<(int)samples; i++)
{
float height = length(samplePoint.xyz);
depth = exp(scaleOverScaleDepth * (pRadius - height));
float scatter = (depth*temp) - camOffset;
attenuate = exp(-scatter * (InvWaveLength * Kr4PI + Km4PI));
frontColor += attenuate * (depth * scaledLength);
samplePoint += sampleRay;
}
v2p output;
output.Pos = mul(input.Pos, WVP);
output.Color0 = float4(frontColor * (InvWaveLength.xyz * KrESun + KmESun), lightAngle);
output.Color1 = float4(attenuate, 1);
output.TexCoord = input.UVW;
return output;
}
// Combines the per-vertex in-scatter colour (Color0) with the attenuated
// surface colour (planetColor * Color1).
// NOTE(review): Texture2D.Sample expects a float2 texcoord; passing
// input.TexCoord.xyz relies on implicit truncation — use .xy explicitly.
// NOTE(review): multiplying the final colour by lightAngle applies
// day/night darkening on top of the scattering, which already encodes
// illumination — confirm this is intended (the terminator colour is
// discussed as being washed out by exactly this).
float4 PS(v2p input) : SV_TARGET
{
float4 planetColor;
if (textured) planetColor =  objTexture.Sample(texSampler, input.TexCoord.xyz);
else planetColor = objColor;
// lightAngle was smuggled through Color0.w by the vertex shader.
float lightAngle = input.Color0.w;
float4 color = input.Color0 + (planetColor * input.Color1);
color *= lightAngle;
color.w = 1.0;
return color;
}


### #11swiftcoder  Senior Moderators

17805
Like
1Likes
Like

Posted 04 March 2012 - 10:01 PM

You might want to think about working from the most current version of his shaders (poke around in 'downloads'), rather than the dated, and somewhat badly-commented version in GPU Gems...

Tristam MacDonald - Software Engineer @ Amazon - [swiftcoding] [GitHub]

### #12Hyunkel  Members

401
Like
0Likes
Like

Posted 05 March 2012 - 04:33 AM

Is the orange day/night transition in your renders a result of O'Neils shader, or something that you are doing?

It is a result of O'Neil's shaders.

I can't find any obvious mistakes in your code except for:

float4 Color0 : COLOR0;
float4 Color1 : COLOR1;

You should pass these values as TEXCOORD's, because directx clamps COLOR to the [0, 1] range.
This should only cause minor artifacts though.

You might want to think about working from the most current version of his shaders (poke around in 'downloads'), rather than the dated, and somewhat badly-commented version in GPU Gems...

Thank you for pointing this out! I had no idea.

### #13melak47  Members

112
Like
0Likes
Like

Posted 05 March 2012 - 04:59 AM

You might want to think about working from the most current version of his shaders (poke around in 'downloads'), rather than the dated, and somewhat badly-commented version in GPU Gems...

The thing is, I am!
I have started the SkyFromSpace shader from scratch about 6 times now, once from the GPU Gems article, once from the current version, once from the article in nvidia FX Composer, trying to debug it, again from the current version in fx composer, once from someone else's source code on here, and once from Hyunkel's code here.

I figured out why I didn't get the orange transition. Before I got the lightScale value right, the whole sphere was lit up so I multiplied the resulting diffuse by the lightAngle to make the far side of the planet dark. Now that the lightScale works correctly though, it was fading the diffuse out before the transition.
Now that it's got the right value, the far side of the planet turns dark on its own.

I'm kind of clueless about the SkyFromSpace shader though...I guess I will take another stab at it sometime.

### #14dealeer  Members

95
Like
-2Likes
Like

Posted 06 December 2012 - 07:24 AM

C:\FxComposer.jpg

### #15dealeer  Members

95
Like
0Likes
Like

Posted 06 December 2012 - 07:43 AM

hello,
I come here bearing a cry for help. I needed an atmosphere shader for the little project I am working on, so I converted the SkyFromSpace shader from GPU Gems 2, Chapter 16 ("Accurate Atmospheric Scattering", written by Sean O'Neil) to SkyFromSpace.fx. I want to see the atmosphere effect in the software FX Composer, but the result is incorrect.

the effect in FxComposer:

and the effect in my project:

Here is the shader code i'm using:
// SkyFromSpace - Author: Sean O'Neil - Copyright (c) 2004 Sean O'Neil
//The output of this vertex shader
struct vertout
{
float4 pos : POSITION;  // Transformed vertex position
float4 c0  : COLOR0;   // The Rayleigh color
float4 c1  : COLOR1;   // The Mie color
float3 t0  : TEXCOORD0;
};
//The number of sample points taken along the ray
const int nSamples = 2;
const float fSamples =2.0;
//The scale depth (the altitude at which the average atmospheric density is found)
const float fScaleDepth = 0.25;
const float fInvScaleDepth = 4.0;
//The scale equation calculated by Vernier's Graphical Analysis
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
//Calculates the Mie phase function
float getMiePhase(float fCos, float fCos2, float g, float g2)
{
return 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos2) / pow(1.0 + g2 - 2.0*g*fCos, 1.5);
}
//Calculates the Rayleigh phase function
float getRayleighPhase(float fCos2)
{
//return 1.0;
return 0.75 + 0.75*fCos2;
}
//Returns the near intersection point of a line and a sphere
float getNearIntersection(float3 v3Pos, float3 v3Ray, float fDistance2, float fRadius2)
{
float B = 2.0 * dot(v3Pos, v3Ray);
float C = fDistance2 - fRadius2;
float fDet = max(0.0, B*B - 4.0 * C);
return 0.5 * (-B - sqrt(fDet));
}
//Returns the far intersection point of a line and a sphere
float getFarIntersection(float3 v3Pos, float3 v3Ray, float fDistance2, float fRadius2)
{
float B = 2.0 * dot(v3Pos, v3Ray);
float C = fDistance2 - fRadius2;
float fDet = max(0.0, B*B - 4.0 * C);
return 0.5 * (-B + sqrt(fDet));
}
//uniform float4x4 gl_ModelViewProjectionMatrix;
uniform float4x4 worldViewProj: WorldViewProjection;
uniform float3 v3CameraPos={0.0,0.0,25};// The camera's current position
uniform float3 v3LightPos={0.0,0.0,1};  // The direction vector to the light source
uniform float red = 0.650f;		  // 650 nm for red
uniform float green = 0.570f;		 // 570 nm for green
uniform float blue = 0.475f;		 // 475 nm for blue
uniform float Kr = 0.0025f;
uniform float Km = 0.0015f;
uniform float PI = 3.14159f;
uniform float ESun = 15.0f;
vertout v_SkyFormSpace(float4 gl_Vertex : POSITION)
{
float3 v3InvWavelength={1.0/pow(red, 4.0f),1.0/pow(green, 4.0f),1.0/pow(blue, 4.0f)};
float fCameraHeight=sqrt(v3CameraPos.x*v3CameraPos.x+v3CameraPos.y*v3CameraPos.y+v3CameraPos.z*v3CameraPos.z);
float fCameraHeight2=fCameraHeight*fCameraHeight;

float fKrESun=Kr * ESun;
float fKmESun=Km * ESun;
float fKr4PI=Kr * 4 * PI;
float fKm4PI=Km * 4 * PI;
float fScaleOverScaleDepth=fScale / fScaleDepth;

// Get the ray from the camera to the vertex and its length (which is the far point of the ray passing through the atmosphere)
float3 v3Pos = gl_Vertex.xyz;
float3 v3Ray = v3Pos - v3CameraPos;
float fFar = length(v3Ray);
v3Ray /= fFar;
// Calculate the closest intersection of the ray with the outer atmosphere (which is the near point of the ray passing through the atmosphere)
float fNear = getNearIntersection(v3CameraPos, v3Ray, fCameraHeight2, fOuterRadius2);
// Calculate the ray's start and end positions in the atmosphere, then calculate its scattering offset
float3 v3Start = v3CameraPos + v3Ray * fNear;
fFar -= fNear;
float fStartAngle = dot(v3Ray, v3Start) / fOuterRadius;
float fStartDepth = exp(-1.0 / fScaleDepth);
float fStartOffset = fStartDepth*scale(fStartAngle);
// Initialize the scattering loop variables
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
float3 v3SampleRay = v3Ray * fSampleLength;
float3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays
float3 v3FrontColor = float3(0.0, 0.0, 0.0);
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fLightAngle = dot(v3LightPos, v3SamplePoint) / fHeight;
//float fCameraAngle = dot(v3Ray, v3SamplePoint) / fHeight;
float fCameraAngle = 1.0;
float fScatter = (fStartOffset + fDepth*(scale(fLightAngle) - scale(fCameraAngle)));
float3 v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepth * fScaledLength);
v3SamplePoint += v3SampleRay;
}
// Finally, scale the Mie and Rayleigh colors and set up the varying variables for the pixel shader
vertout OUT;
OUT.pos = mul(gl_Vertex,worldViewProj);
OUT.c0.rgb = v3FrontColor * (v3InvWavelength * fKrESun);
OUT.c1.rgb = v3FrontColor * fKmESun;
OUT.t0 = v3CameraPos - v3Pos;
return OUT;
}
//uniform float3 v3LightPos;
uniform float g=-0.95;
uniform float g2=0.9025;
float4 p_SkyFormSpace(in float4 c0 : COLOR0,
in float4 c1 : COLOR1,
in float3 v3Direction : TEXCOORD0
) : COLOR
{
float fCos = dot(v3LightPos, v3Direction) / length(v3Direction);
float fCos2 = fCos*fCos;
float4 color = getRayleighPhase(fCos2) * c0 + getMiePhase(fCos, fCos2, g, g2) * c1;
color.a = color.b;
return color;
}
technique technique0 {
pass p0 {
CullMode = None;