Jump to content

  • Log In with Google      Sign In   
  • Create Account

AdeptStrain

Member Since 19 Mar 2012
Offline Last Active Dec 18 2014 10:03 AM

Topics I've Started

SV_IsFrontFace and geometry created from points.

15 October 2014 - 09:02 AM

I've been trying to debug an issue where SV_IsFrontFace doesn't seem to be returning anything but "true" for some geometry I create from points. The MSDN points out that SV_IsFrontFace will always return true for points and lines, but I was hoping that wouldn't be true if triangle strips were generated via a geometry shader from those points - currently it seems that no matter what the geometry looks like at the end of the pipeline, if you start out with lines/points then SV_IsFrontFace will return true.

 

Has anyone else run into this? Or have any suggestions on ways around this problem?


Grass Rendering Questions

07 July 2014 - 12:54 PM

Hey all,

 

I've been working on grass rendering trying to achieve an effect much like Flower achieved.

 

I've got a basic setup working using points in the cells of a heightfield which are then extruded to grass blades in a geometry shader (I have the distribution set to 1 blade per cell while debugging). I'm running into a nasty hang-up and wanted to ping the community for some ideas:

 

Right now I generate a one sided (although culling is disabled) triangle strip, this means I only have normals for 1 side of the strip. Ideally I'd like both sides to have normals so the blade is properly lit on both sides, but I see no way of doing that without either generating double the geometry or billboarding the grass blade (which I really don't want to do). Any thoughts?

 

Screenshot attached (note the blades are unlit currently and they need a random rotation applied since they currently look pretty nasty whenever you are perpendicular to them).


L16 and TextureObject.Load

16 June 2014 - 07:57 PM

I'm having a hell of a time getting an L16 texture (16bit heightfield data) read in my shader and was hoping someone could spot where I'm going wrong.

 

I'm pretty sure the logic is just some syntax issue in my shader, but just to verify I ran everything through RenderDoc and I can see the load call being executed, however it always returns 0. I've tried a few values to no avail, checking the pipeline I can see the resource is bound and I can see the values in the resource look correct - the shader just isn't reading it for some reason.

 

Any thoughts?

 

Shader:

// Combined world-view-projection matrix supplied by the application.
float4x4 WorldViewProjection	: WorldViewProjection;

// Textures
// Diffuse colour map (declared but not referenced by the shaders shown here).
Texture2D<float4> DiffuseTexture;
// 16-bit (L16) heightfield, read as raw unsigned integers via Load.
Texture2D<uint> HeightmapTexture;

// World-space height that the maximum heightmap value maps to.
float MaxHeight;

// Structures
// Vertex shader input: model-space position plus heightmap lookup UV.
struct VSInput
{
	float4 Position		: POSITION;
	float2 Uv		: TEXCOORD0;
};

// Vertex shader output / pixel shader input: clip-space position and UV.
struct VSOutput
{
	float4 Position		: SV_POSITION;
	float2 Uv		: TEXCOORD0;
};

// Trilinear filtering, coordinates clamped to [0,1] on both axes.
sampler LinearClampSampler
{
	Filter = Min_Mag_Mip_Linear;
	AddressU = Clamp;
	AddressV = Clamp;
};

// Nearest-neighbour filtering, clamped addressing (not referenced in the
// shaders shown here; Load bypasses samplers entirely).
sampler PointClampSampler
{
	Filter = Min_Mag_Mip_Point;
	AddressU = Clamp;
	AddressV = Clamp;
};

// Helper Methods
// Samples the 16-bit heightmap at a normalized UV and scales the result
// into world units ([0, MaxHeight]).
//
// BUG FIX: Texture2D.Load takes *integer texel* coordinates, not 0..1
// UVs. The original passed the UV straight through, so it truncated to
// texel (0,0)/(0,1)/(1,0)/(1,1) and every lookup returned the same value
// (typically 0). Convert UV -> texel space using the bound texture's
// actual dimensions first.
float GetHeightmapValue(float2 uv)
{
	// Query the bound resource so this works for any heightmap size.
	uint width, height;
	HeightmapTexture.GetDimensions(width, height);

	// Map normalized UV to an integer texel coordinate at mip level 0.
	uint3 texel = uint3(uv.x * (width - 1), uv.y * (height - 1), 0);

	uint rawValue = HeightmapTexture.Load(texel);

	// FIX: the maximum 16-bit value is 65535, not 256*256 (= 65536).
	return ((float)rawValue / 65535.0f) * MaxHeight;
}

// Vertex shaders
// Forward-pass vertex shader: replaces the vertex's model-space Y with
// the heightfield sample at its UV, then projects to clip space.
VSOutput ForwardRenderVS(VSInput IN)
{
	VSOutput result = (VSOutput)0;

	// Displace vertically by the heightmap; X/Z come straight from the mesh.
	float3 displaced = float3(IN.Position.x, GetHeightmapValue(IN.Uv), IN.Position.z);

	result.Position = mul(float4(displaced, 1.0f), WorldViewProjection);
	result.Uv = IN.Uv;
	return result;
}


// Fragment shaders.
// Debug pixel shader: flat, unlit green (alpha 0) for every fragment.
float4 ForwardRenderFP(VSOutput In) : FRAG_OUTPUT_COLOR0
{
	const float4 debugGreen = float4(0.0f, 1.0f, 0.0f, 0.0f);
	return debugGreen;
}

// Render states.
// Opaque rendering: blending off, all four colour channels written (mask 15 = RGBA).
BlendState NoBlend
{
	BlendEnable[0] = FALSE;
	RenderTargetWriteMask[0] = 15;
};

// Standard premultiplied-style alpha blend on render target 0:
//   colour = src.rgb * src.a + dst.rgb * (1 - src.a)
// The alpha channel itself is zeroed (ZERO + ZERO), and blending is
// explicitly disabled on render target 1.
BlendState LinearBlend
{
	BlendEnable[0] = TRUE;
	SrcBlend[0] = SRC_ALPHA;
	DestBlend[0] = INV_SRC_ALPHA;
	BlendOp[0] = ADD;
	SrcBlendAlpha[0] = ZERO;
	DestBlendAlpha[0] = ZERO;
	BlendOpAlpha[0] = ADD;
	BlendEnable[1] = FALSE;
	RenderTargetWriteMask[0] = 15;
};

// Conventional depth testing: test with Less, write depth, stencil off.
DepthStencilState DefaultDepthState
{
	DepthEnable = TRUE;
	DepthWriteMask = All;
	DepthFunc = Less;
	StencilEnable = FALSE;
};

// No face culling — both sides of every triangle are rasterized.
RasterizerState DefaultRasterState
{
	CullMode = None;
};

// NOTE(review): name says "CullBack" but CullMode is Front — with the
// default (clockwise = front) winding this culls front faces. Confirm
// whether the winding order is reversed elsewhere or this is a mistake.
RasterizerState CullBackRasterState
{
	CullMode = Front;
};

// Techniques.
// Single-pass forward technique: SM 4.0 VS/PS, opaque blend, default
// depth test, no culling.
technique11 ForwardRender
{
	pass pass0
	{
		SetVertexShader( CompileShader( vs_4_0, ForwardRenderVS() ) );
		SetPixelShader( CompileShader( ps_4_0, ForwardRenderFP() ) );
	
		SetBlendState( NoBlend, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
		SetDepthStencilState( DefaultDepthState, 0 );
		SetRasterizerState( DefaultRasterState );
	}
}

RenderDoc image.

 


Improving Terrain Textures (getting banding)

02 October 2012 - 07:18 PM

Hey all,

Recently trying to get back into terrain rendering and I'm having a bit of trouble with improving my texturing and unsure of where I should focus my efforts. I've been reading a lot of white papers by DICE's Johan Andersson (the various techniques like PN Triangles, slope based blending, Wang tiles, etc), but I'm not sure where to start. Right now I have a 257 x 257 coherent noise generated heightfield and I'm using a simple 2D texture array to texture things based on height. The banding is pretty atrocious and the fact that my UVs are set like they are 1 texture stretched over a quad is causing all the resolution to be lost in my texture samples (at least that's my current theory...).

Would increased tessellation (i.e. via a hull/domain shader) help? Or what would you recommend I would get the biggest bang for my buck as I go forward with this system?

Pic of current banding and shader code below.

Posted Image

//--------------------------------------------------------------------------------------
// Constant Buffer Variables
//--------------------------------------------------------------------------------------
// Per-object transform matrices, bound to slot b0.
cbuffer WVPConstantBuffer : register( b0 )
{
matrix World;
matrix View;
matrix Projection;
}
// Debug directional light; LightDir is the direction the light travels
// (the pixel shader negates it for N.L).
cbuffer DebugLight : register ( b1 )
{
	   float3 LightDir;
}
// t0: five-layer terrain texture array (dirt/grass/rock/snow), t1: normal map.
Texture2DArray txDiffuse : register( t0 );
Texture2D	  txNormal : register( t1 );
SamplerState samLinear : register( s0 );
//--------------------------------------------------------------------------------------
// Terrain vertex input: position, UV, and a normal/tangent pair for
// normal mapping in the pixel shader.
struct VS_INPUT
{
	float4 Pos : POSITION;
	float2 Tex : TEXCOORD0;
	float3 Normal : NORMAL;
	float3 Tangent : TANGENT;
};
// Terrain VS output: Tex is widened to float3 so Tex.z can carry the
// pre-transform model-space height for height-based layer blending.
struct VS_OUTPUT
{
	float4 Pos : SV_POSITION;
	float3 Tex : TEXCOORD0;
	float3 Normal : NORMAL;
	float3 Tangent : TANGENT;  
};
//--------------------------------------------------------------------------------------
// Vertex Shader
//--------------------------------------------------------------------------------------
// Terrain vertex shader: transforms the position through World, View,
// then Projection, rotates normal/tangent into world space, and stashes
// the untransformed model-space height (input.Pos.y) in Tex.z for the
// pixel shader's height-based layer selection.
VS_OUTPUT VS_Terrain(VS_INPUT input )
{
	VS_OUTPUT result = (VS_OUTPUT)0;

	// Keep the pre-transform height around for layer blending.
	result.Tex = float3(input.Tex, input.Pos.y);

	// Same transform order as before: World, then View, then Projection.
	float4 worldPos = mul( input.Pos, World );
	float4 viewPos = mul( worldPos, View );
	result.Pos = mul( viewPos, Projection );

	// Directions use w = 0 so translation in World is ignored.
	result.Normal = mul(float4(input.Normal, 0.0f), World).xyz;
	result.Tangent = mul(float4(input.Tangent, 0.0f), World).xyz;

	return result;
}
// Terrain pixel shader: picks/blends five texture-array layers by the
// vertex's model-space height (Tex.z), perturbs the surface normal with
// a tangent-space normal map, and applies one directional light.
float4 PS_Terrain( VS_OUTPUT input ) : SV_Target
{
	float4 darkDirt  = txDiffuse.Sample(samLinear, float3(input.Tex.xy, 1.0f));
	float4 lightDirt = txDiffuse.Sample(samLinear, float3(input.Tex.xy, 2.0f));
	float4 grass	 = txDiffuse.Sample(samLinear, float3(input.Tex.xy, 3.0f));
	float4 rock	  = txDiffuse.Sample(samLinear, float3(input.Tex.xy, 4.0f));
	float4 snow	  = txDiffuse.Sample(samLinear, float3(input.Tex.xy, 5.0f));

	// FIX: interpolated normals/tangents are not unit length after
	// rasterization — renormalize before using them for slope blending,
	// TBN construction, or lighting.
	float3 N = normalize(input.Normal);

	float4 color;

	float heightPercentage = input.Tex.z / 100.0f; // current max height allowed, need to move this to a constant buffer.
	float blend = 1.0f - N.y; // slope value between the layers. Closer to 90 degrees, we blend more of that texture.

	// Height bands pick the layer pair; slope picks the mix inside a band.
	if(heightPercentage < 0.05f)
	{
	   color = darkDirt;
	}
	else if (heightPercentage < 0.15f)
	{
	   color = lerp(darkDirt, lightDirt, blend);
	}
	else if(heightPercentage < 0.4f)
	{
	   color = lerp(lightDirt, grass, blend);
	}
	else if(heightPercentage < 0.7f)
	{
	   color = lerp(grass, rock, blend);
	}
	else
	{
	   color = lerp(rock, snow, blend);
	}

	// Normal mapping.
	float3 normalMapVal = txNormal.Sample(samLinear, input.Tex.xy).rgb;

	// From texel range [0,1] to [-1,1].
	normalMapVal = 2.0f * normalMapVal - 1.0f;

	// Gram-Schmidt: re-orthogonalize the interpolated tangent against N.
	float3 T = normalize(input.Tangent - dot(input.Tangent, N)*N);
	float3 B = cross(N, T);
	float3x3 TBN = float3x3(T, B, N);

	// Transform from tangent space to world space.
	float3 bumpedNormal = normalize(mul(normalMapVal, TBN));

	// FIX: clamp N.L to [0,1] — the unclamped dot product produced
	// negative colours on texels facing away from the light.
	color *= saturate(dot(bumpedNormal, -LightDir.xyz));

	color.a = 1.0f;

	return color;
}


Best way to organize HLSL code in DX11?

30 August 2012 - 08:03 AM

Hey all,

Given the way DirectX 11 works with shaders (and that effects are finally tossed to the wayside), I thought about organizing all my vertex shaders/pixel shaders/etc into one "library"-esque file. For the more complex shaders I would branch those out into their own files for organization sake, but for the basic stuff it seemed fitting to move everything into one file. Are there potential issues with this approach?

HLSL file below.
//--------------------------------------------------------------------------------------
// Constant Buffer Variables
// Per-object transform matrices, bound to slot b0.
cbuffer WVPConstantBuffer : register( b0 )
{
matrix World;
matrix View;
matrix Projection;
}

// Endpoint colours for PS_Gradient's vertical gradient.
cbuffer GradientConstantBuffer : register(b1)
{
float4 sourceColor;
float4 destColor;
}
//--------------------------------------------------------------------------------------
// Textures
// t0: diffuse texture used by PS_TextureSample.
Texture2D diffuseTex : register( t0 );
//--------------------------------------------------------------------------------------
// Sampler states
SamplerState linearSamp : register( s0 );
//--------------------------------------------------------------------------------------
// Various supported structs
// Input layout for textured geometry: position + one UV set.
struct VS_InTexturedVert
{
	float4 Pos : POSITION;
	float2 Tex : TEXCOORD0;
};

// VS output for textured geometry: clip-space position + UV.
struct VS_OutTexturedVert
{
	float4 Pos : SV_POSITION;
	float2 Tex : TEXCOORD0;
};

// Input layout for per-vertex-coloured geometry.
struct VS_InColoredVert
{
	float4 Pos : POSITION;
	float4 Color : COLOR0;
};

// VS output for per-vertex-coloured geometry.
struct VS_OutColoredVert
{
	float4 Pos : SV_POSITION;
	float4 Color : COLOR0;
};
//--------------------------------------------------------------------------------------
// Vertex Shaders
// Textured vertex shader with the full World x View x Projection chain.
VS_OutTexturedVert VS_TexturedVertWVP(VS_InTexturedVert input )
{
	VS_OutTexturedVert result = (VS_OutTexturedVert)0;
	// Same transform order as three separate muls: World, View, Projection.
	result.Pos = mul( mul( mul( input.Pos, World ), View ), Projection );
	result.Tex = input.Tex;
	return result;
}

// Pass-through textured vertex shader: no transform, so input positions
// are assumed to already be in clip/screen space.
VS_OutTexturedVert VS_TexturedVert(VS_InTexturedVert input )
{
	VS_OutTexturedVert result = (VS_OutTexturedVert)0;
	result.Pos = input.Pos;
	result.Tex = input.Tex;
	return result;
}

// Coloured vertex shader with the full World x View x Projection chain;
// the vertex colour is forwarded untouched.
VS_OutColoredVert VS_ColoredVertWVP(VS_InColoredVert input )
{
	VS_OutColoredVert result = (VS_OutColoredVert)0;
	// Same transform order as three separate muls: World, View, Projection.
	result.Pos = mul( mul( mul( input.Pos, World ), View ), Projection );
	result.Color = input.Color;
	return result;
}

//--------------------------------------------------------------------------------------
// Pixel Shaders
// Unlit textured pixel shader: a single diffuse fetch.
float4 PS_TextureSample( VS_OutTexturedVert input ) : SV_Target0
{
	float4 texel = diffuseTex.Sample( linearSamp, input.Tex );
	return texel;
}

// Vertical gradient: per-channel smoothstep between sourceColor and
// destColor, keyed off the V texture coordinate.
float4 PS_Gradient( VS_OutTexturedVert input ) : SV_Target0
{
	// Splat Tex.y across all four lanes, then apply smoothstep
	// component-wise — identical to four separate scalar calls.
	float4 t = float4(input.Tex.y, input.Tex.y, input.Tex.y, input.Tex.y);
	return smoothstep(sourceColor, destColor, t);
}

// Pass-through pixel shader: emit the interpolated vertex colour.
float4 PS_Colored( VS_OutColoredVert input ) : SV_Target0 // Simple colored vertex pixel shader.
{
	return input.Color;
}

PARTNERS