
Shaders - Control over Rasterization?


dopplex
I'm having some issues with a silhouette rendering shader that I'm in the process of writing. In its base form it works - http://www.dopplex.net/EngineScreens/myScreenShot0.png is an example of the kind of output I'm putting together (the silhouette pass is overlaid on the shading pass in the screenshot).

The basic approach is to generate polygonal fins on all edges, hide the edges that shouldn't be rendered as silhouettes by making their fin polygons degenerate, and procedurally texture all remaining fins in the pixel shader. (All vertices on the base geometry have v = 0 in their uv coordinates and all vertices on the fins have v = 1, so I evaluate the interpolated value of v to figure out how to shade and blend each pixel.)

The problem is that some edges - ones whose fins are very thin but not degenerate - don't seem to be making it through the rasterizer and into the pixel shader, or if they are, I'm losing them somehow. This results in broken lines and looks pretty bad. What I'd like to happen for sub-pixel-sized edges is for the pixel shader to get run for the pixel anyway, and to simply adjust the alpha blend amount based on how much of the pixel the edge should cover (essentially antialiasing it). I'm also trying to do something similar for my thicker edges - that is, anti-alias them - but I feel like I have a better handle on how to do that, even if it isn't working very well yet.

So, the questions:

1. How could I go about making sure that the really small edges still hit the pixel shader?

2. What would be the best way to pass information about the edge's width in pixels to the pixel shader? The only vertices that "know" this information are the fin vertices, which means it's going to get interpolated with whatever the base data is from the polygon vertices. (I guess that since I "know" the interpolated v value, I could use it to un-interpolate the other value... seems a bit silly though.)

3. Can anyone recommend a good method for figuring out what portion of the edge is contained in a pixel? Right now I'm thinking of just using the width data I pass down to do the equivalent of multisampling, but I feel like there's probably a better way. (There's a sketch of the clamp-and-coverage idea I'm considering after the code listing below.)

Edit:

4. I'm having trouble figuring out how to properly calculate the length of my projection/extrusion vector in pixels. I think this may be due to some misunderstanding of screen space. My assumption had been that after the WorldViewProject transformation, the x and y coordinates would represent the vector's horizontal and vertical components in screen space - that is, lengthInPixels = sqrt(myVec.x^2 + myVec.y^2) if myVec has already been transformed by WorldViewProject. (AspectRatio also needs to be applied, but I think I'm getting off track before I hit that point.) This doesn't seem to work. Where am I off? (My current guess at the calculation is sketched after the code listing.)

Here's the code listing for the shader. I apologize for its lack of optimization and cleanliness - it's still a work in progress.
float gLineThickness = 2.0f;
float4x4 World;
float4x4 View;
float4x4 Project;
float2 ScreenSize = float2(800, 600);
float4 AmbientLightColor;
float4 EyePosition;
float4 DiffuseColor;
float3 OutlineParam = float3(.5f, .5f, .05f);
float4 LightDirection;
float4 LightDiffuseColor;
float SpecularPower;
float4 LightSpecularColor;
sampler TextureSampler;

struct VS_INPUT
{
    float4 Position   : POSITION0;
    float4 Normal     : NORMAL0;
    float4 Tri1Normal : NORMAL1;
    float4 Tri2Normal : NORMAL2;
    float4 V1Normal   : NORMAL3;
    float4 V2Normal   : NORMAL4;
    float2 Tex        : TEXCOORD3;
    float4 V1Position : TEXCOORD4;
    float4 V2Position : TEXCOORD5;
    float4 Flags      : TEXCOORD6;
};

struct VS_OUTPUT
{
    float4 Position      : POSITION0;
    float2 Tex           : TEXCOORD0;
    float4 Normal        : TEXCOORD1;
    float4 ViewDirection : TEXCOORD2;
    float2 ScaleFactor   : TEXCOORD3;
    float4 Debug         : TEXCOORD4;
};

VS_OUTPUT Transform2(VS_INPUT Input)
{
    VS_OUTPUT Output;

    float4x4 WorldViewProject = mul(mul(World, View), Project);
    float4 ObjectPosition = mul(Input.Position, World);
    float4 ViewVec = EyePosition - ObjectPosition;
    float4 ViewVec2 = mul(mul(EyePosition - ObjectPosition, View), Project);
    float4 OutputPosition = mul(Input.Position, WorldViewProject);
    float2 ScaleFactor = float2(1.0f, 1.0f);
    float4 Debug = (float4)0.0f;

    Output.Normal = mul(Input.Normal, World);
    Output.ViewDirection = EyePosition - ObjectPosition;
    Output.Tex = Input.Tex;

    if (Input.Flags.w >= 0)
    {
        float4 Tri1Normal = normalize(mul(Input.Tri1Normal, World));
        float4 Tri2Normal = normalize(mul(Input.Tri2Normal, World));
        float Tri1DotV = dot(Tri1Normal, ViewVec);
        float Tri2DotV = dot(Tri2Normal, ViewVec);
        float Det = Tri1DotV * Tri2DotV;

        OutputPosition = mul(Input.V1Position, WorldViewProject);

        float Det2 = dot(Tri1Normal, Tri2Normal);
        if (Det2 < .95f) Det = -1.0f;

        if (Det < 0)
        {
            float4 ExtrudeVec = float4(0.0f, 0.0f, 0.0f, 0.0f);

            if (Input.Flags.x == 0)
            {
                ExtrudeVec = mul(normalize(Input.V1Normal), WorldViewProject);
                OutputPosition = mul(Input.V1Position, WorldViewProject);
            }
            if (Input.Flags.x > 0)
            {
                ExtrudeVec = mul(normalize(Input.V2Normal), WorldViewProject);
                OutputPosition = mul(Input.V2Position, WorldViewProject);
            }

            ExtrudeVec = gLineThickness * ExtrudeVec;

            if (ExtrudeVec.x * ScreenSize.x < 1)
            {
                Debug.r = 1.0f;
                ScaleFactor.x = 1.0f / (ExtrudeVec.x * ScreenSize.x);
            }
            if (ExtrudeVec.y * ScreenSize.y < 1)
            {
                Debug.g = 1.0f;
                ScaleFactor.y = 1.0f / (ExtrudeVec.y * ScreenSize.y);
            }

            ExtrudeVec.xy = ExtrudeVec.xy * ScaleFactor;
            OutputPosition += gLineThickness * ExtrudeVec;
        }
    }

    Output.Debug = Debug;
    Output.ScaleFactor = ScaleFactor;
    Output.Position = OutputPosition;
    return Output;
}

struct PS_INPUT
{
    float4 Color         : COLOR0;
    float4 Position      : POSITION0;
    float4 Tex           : TEXCOORD0;
    float4 Normal        : TEXCOORD1;
    float4 ViewDirection : TEXCOORD2;
    float2 ScaleFactor   : TEXCOORD3;
    float4 Debug         : TEXCOORD4;
};

float4 BasicShader(PS_INPUT Input) : COLOR0
{
    float4 Color = (float4)0.0f;
    Color.w = 1.0f;

    float2 pUV = Input.Tex * (1.0f / length(Input.ScaleFactor));
    Color.xyz = (float3)pUV.y;

    //return Color;
    //return float4(0.0f, 0.0f, 0.0f, 1.0f);
    //float4 tColor = (float4)Input.Debug;
    //tColor.w = 1.0f;
    //return tColor;

    float Alpha = (1.0f - abs(OutlineParam.x - pUV.y)) / (1.0f - OutlineParam.y)
                - (1.0f - OutlineParam.z) * (1.0f / OutlineParam.z);
    Color = float4(0.0f, 0.0f, 0.0f, Alpha);
    return Color;

    float samp1Val = pow(Input.Tex.x, 2) - Input.Tex.y;
    //return Color;
    //if (samp1Val<0)
    float Trans = 1.0f - Input.Tex.y;
    Trans = Trans * 20.0f;
    if (Input.Tex.y < .04) Trans = 25.0f * Input.Tex.y;
    Color = float4(0.0f, 0.0f, 0.0f, Trans);
    //return float4(0.0f, 0.0f, 0.0f, 0.0f);
    return Color;

    /*
    float4 Color = float4(0.0f, 0.0f, 0.0f, 0.0f);
    if (Input.Tex.w >= 0.1f) { clip(-1.0f); }
    if (Input.Tex.w < 0.1f) { Color = Input.Color; }
    return Color;
    */
}

technique BasicShader
{
    pass P0
    {
        AlphaBlendEnable = True;
        SrcBlend = srcalpha;
        DestBlend = invsrcalpha;
        CullMode = None;
        VertexShader = compile vs_2_0 Transform2();
        PixelShader = compile ps_2_0 BasicShader();
    }
}

[Edited by - dopplex on April 23, 2008 1:52:02 PM]
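Edit 2: To make question 4 more concrete, here's my current guess at the calculation, as an untested sketch (the function names are just placeholders I made up, and I may still be misunderstanding something). My suspicion is that I can't read x and y straight out of the post-WorldViewProject position without first dividing by w, and that the resulting normalized device coordinates span [-1, 1], so they have to be scaled by half the screen size to get pixels:

// Sketch only - this is my assumption, not verified.
// Clip space -> pixel coordinates: perspective divide, then scale the
// NDC range [-1, 1] by half the screen size.
float2 ClipToPixels(float4 clipPos, float2 screenSize)
{
    float2 ndc = clipPos.xy / clipPos.w;
    return ndc * 0.5f * screenSize;
}

// Length in pixels of the extrusion, measured between the two transformed
// endpoints rather than by transforming the direction vector itself, since
// the divide by w isn't linear.
float ExtrusionLengthInPixels(float4 basePosClip, float4 extrudedPosClip, float2 screenSize)
{
    float2 delta = ClipToPixels(extrudedPosClip, screenSize) - ClipToPixels(basePosClip, screenSize);
    return length(delta);
}

If that's right, it would also mean I can't just transform the extrusion vector by WorldViewProject and measure it directly - I'd need to transform the base and extruded positions separately and take the difference after the divide. Does that sound correct?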
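Along the same lines, here's roughly the clamp-and-coverage idea I mentioned under questions 1 and 3 - again just a sketch of what I'm considering, not working code, and the helper name and the Coverage interpolator are hypothetical. The idea is that if a fin would come out narrower than a pixel, I extrude it to a full pixel anyway so the rasterizer doesn't drop it, and carry the "true width / drawn width" fraction down to the pixel shader to scale the alpha, as a cheap approximation of coverage instead of real multisampling:

// Hypothetical helper - widthInPixels would come from something like
// ExtrusionLengthInPixels() above. Pads a sub-pixel fin out to one pixel
// and returns the fraction of that pixel the edge actually covers.
float ClampFinWidth(inout float2 extrudeNdc, float widthInPixels)
{
    float coverage = 1.0f;
    if (widthInPixels > 0.0f && widthInPixels < 1.0f)
    {
        coverage = widthInPixels;              // fraction of the pixel really covered
        extrudeNdc *= 1.0f / widthInPixels;    // pad the fin out to a full pixel
    }
    return coverage;
}

The vertex shader would pass coverage down in a spare TEXCOORD and the pixel shader would just multiply it into the output alpha. Of course only the fin vertices would carry a meaningful value, which runs straight into the interpolation problem from question 2 - maybe it's good enough that the base vertices output 1.0, since the value only really matters near v = 1?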
