Jump to content

  • Log In with Google      Sign In   
  • Create Account


SSAO failure


Old topic!
Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.

  • You cannot reply to this topic
3 replies to this topic

#1 LPVOID   Members   -  Reputation: 106

Like
0Likes
Like

Posted 08 September 2013 - 05:47 AM

I was trying to get SSAO working, but I get a bad result. Please tell me where the mistake is — thank you.
(Please forgive my poor English spelling.)

 

here is the screen shot

 

ssao: (if I change some parameters, the screen gets divided into a 4x4 grid)

texS.jpg

 

normal

SN.jpg

 

tex

tex.jpg

 

////////////////////////////////////////////////////////////////

//here is the main ps code:

// SSAO pass: estimates ambient occlusion per pixel from the position/normal
// G buffers by sampling random points inside a hemisphere around the surface.
// Returns the occlusion factor replicated into RGB (1 = unoccluded).
// NOTE(review): 'randVec' (the precomputed sample kernel) is not declared in
// this file — presumably a global array set by the application; confirm.
float4 main(PS_IN pIn):SV_TARGET
{
    //view space position of this pixel (from the position G buffer)
    float3 posInView=GetViewPos(pIn.texCoor);
    //view space depth (linear, since the G buffer stores view-space position)
    float deep=posInView.z;
    //world-space sample radius, scaled with depth so the screen footprint stays constant
    float SphereR=GetSamRadius(deep);
    //view space normal
    //(when rendering to the G buffer the normal was NOT remapped to 0.0-1.0)
    float3 n=gBuff_1.Sample(Sam_Point_Warp,pIn.texCoor).xyz;
    int sampleCount=32;
    float ssaoRate=0;
    for(int idx=0;idx<sampleCount;++idx)
    {
        //Get a random vector from the precomputed kernel
        float3 ranVec3=randVec[idx];
        //reflect the random vector by a per-pixel noise texture to decorrelate samples
        ranVec3=normalize(RefByNoise(pIn.texCoor,ranVec3));
        //flip the vector into the hemisphere above the surface
        if(dot(n,ranVec3)<0)
        {
            ranVec3=-ranVec3;
        }
        //scale the unit vector to the sample radius
        ranVec3=ranVec3*SphereR;
        //position whose occlusion we want to test
        float3 posForSample=posInView+ranVec3;
        //project it back to screen UV
        float2 uvForSample=viewPosToUV(posForSample);
        //actual scene position stored in the G buffer at that UV
        float3 viewPosInScene=GetViewPos(uvForSample);
        ssaoRate+=SSAO(posForSample,viewPosInScene,posInView,SphereR);
    }
    ssaoRate/=sampleCount;
    ssaoRate=1-ssaoRate; // invert so 1 = unoccluded
    return float4(ssaoRate,ssaoRate,ssaoRate,1);
}

///////////////////////////

here are some declarations and functions (in the shader file, this code is at the top)

// AOScanDis is the sample radius expressed as a fraction of the screen width
// (e.g. AOScanDis = 0.1 with a 1024-wide screen samples within ~102 pixels).
// Returns the world-space sampling radius for a pixel at the given view depth.
float GetSamRadius(float distance)
{
    // Width of the view frustum at 'distance' is tan(halfFOV) * distance * 2;
    // scale by the screen fraction to get the world-space radius.
    float frustumWidth = tanH_FOV * distance * 2.0;
    return frustumWidth * AOScanDis;
}

// Reflect 'ray' about the plane with normal 'n' (standard reflection formula).
float3 GetRefVec3(in float3 ray,in float3 n)
{
    return ray - 2.0 * dot(n, ray) * n;
}

// Reflect 'vec' by a per-pixel random normal fetched from the tiled noise texture,
// decorrelating the sample kernel between neighboring pixels.
float3 RefByNoise(float2 UV,float3 vec)
{
    // Tile the NorTexSize x NorTexSize noise texture across the G buffer.
    float2 noiseUV = UV * GBufferWH / NorTexSize;
    // Expand the stored [0,1] texel back to a [-1,1] vector.
    float3 noiseNormal = gRefNosize.Sample(Sam_Point_Warp, noiseUV).xyz * 2 - 1;
    return GetRefVec3(vec, noiseNormal);
}
// Reconstruct a view-space position from the hardware depth buffer.
// NOTE(review): this function is unfinished — it samples the depth but has NO
// return statement, so its result is undefined if it is ever called. It appears
// unused (GetViewPos below reads the position G buffer instead). Linearizing
// the hardware depth would also need the projection near/far constants, which
// this shader's cbuffer does not provide — TODO confirm before using.
float3 GetViewPosFromZ(float2 UV)
{
    float p=zBuffer.Sample(Sam_Point_Warp,UV).x;
}
// Fetch the view-space position stored in the position G buffer at this screen UV.
float3 GetViewPos(float2 UV)
{
    float3 viewPos = gBuff_2.Sample(Sam_Point_Warp, UV).xyz;
    return viewPos;
}

//convert view pos to uv coord
float2 viewPosToUV(float3 p)
{
    //1. convert projection to the plane where z=1
    p.xy=p.xy/p.z;
    //get half size of wide and height, tanH_FOV=tan(half size of camera horizontal  angle)
    float w=tanH_FOV;
    float h=BackBufferWH.y/BackBufferWH.x*w;
    //Get Length rate
    p.x=p.x/w;
    p.y=p.y/h;
    //transform camera xy to texture uv
    float2 UV=(float2)0;
    UV.x=p.x*0.5+0.5;
    UV.y=-p.y*0.5+0.5;
    return UV;
}

//1st prama : pos for sample
//2nd: the real view pos
//3rd: center pos
//4th: Radius
float SSAO(float3 pS,float3 pR,float3 pC,float sphereR)
{
    if(pS.z>pR.z)
    {
        float occDis=length(pR-pC);
        if(occDis<=AOep)
        {
            return 0;
        }
        else
        {
            return max((1-pow((occDis-AOep)/sphereR,AOIntensity)),0);
        }
    }
    else
    {
        return 0;
    }
}

///////////////////////////////////////////////

// Per-pass constants for the SSAO post-process, bound at slot b1.
cbuffer texPP:register(b1)
{
    float2 UVBlurBufferWH; // blur-buffer dimensions (not read in this file)
    float2 BackBufferWH;   // back-buffer width/height; used for the aspect ratio
    float2 GBufferWH;      // G-buffer width/height; used to tile the noise texture
    float2 fTimeMtime;     // time values (not read in this file)
    float2 value;          // spare parameters (not read in this file)

    float           AOScanDis;   // sample radius as a fraction of the screen width
    float           tanH_FOV;    // tan(half horizontal field of view)
    float           AOEp;//if the occluder distance is less than AOEp, no AO occurs
                         // NOTE(review): SSAO() above spells this 'AOep' — HLSL is
                         // case-sensitive, so one of the two spellings must be fixed.
    float           AOIntensity; // exponent shaping the occlusion falloff
    float           NorTexSize;  // noise-texture dimension (square)
}

// Full-screen pass input: clip-space position plus screen UV.
struct PS_IN
{
    float4 pos:SV_POSITION;
    float2 texCoor:TEXCOORD0;
};
Texture2D       gBuff_0 : register( t0 );//Texture (diffuse color)
Texture2D       gBuff_1 : register( t1 );//Normal (view space, not remapped to [0,1])
Texture2D       gBuff_2 : register( t2 );//Position (view space)
Texture2D       zBuffer : register( t3 );//Depth (hardware depth buffer)
Texture2D   gRefNosize: register( t4 );//noise texture used to randomize the kernel

// NOTE(review): named "Warp" — presumably a point sampler with Wrap addressing
// (typo); the addressing mode is set application-side, confirm there.
SamplerState   Sam_Point_Warp:   register(s0);

Edited by LPVOID, 08 September 2013 - 09:35 PM.


Sponsor:

#2 Migi0027   Crossbones+   -  Reputation: 1511

Like
0Likes
Like

Posted 08 September 2013 - 06:49 AM

Welcome to the forums!

 

PS. Next time organize your code with the tool that the thread editor gives you, it will make it much more clear. (It's these <> things in the editor)

 

How do you construct the Normal + Position + Depth maps? (That was my major issue back then.)

If I'm not wrong, they need to be normalized in view space (model space can also be taken into consideration).

 

PS. I might not be the best person to assist you, as I've accomplished my SSAO in a different manner, but I'll give it a shot!

-MIGI0027


Edited by Migi0027, 08 September 2013 - 08:54 AM.


#3 LPVOID   Members   -  Reputation: 106

Like
0Likes
Like

Posted 08 September 2013 - 09:24 AM

Thanks for your reading!

 

the G buffer defined like this

R16 ---------G16----------B16---------A16                
{colorR}        {colorG}        {colorB}         {alpha}                           
{norR}        {norG}            {norB}         {glow}                           
{posR}        {posG}            {posB}         {matID}                           

 

Z's format is DXGI_FORMAT_R32_TYPELESS.

Z is not computed manually:    m_pDevice->OMSetRenderTargets(3,pGBuffer->ppRTV,pGBuffer->pZRT);

 

 

static mesh rendering code is here

//this code shows how a static mesh is rendered to the G buffer
//rendering terrain is similar to it

// Transform matrices supplied by the application through effect semantics.
float4x4 gV:VIEW;//view
float4x4 gWV:WORLDVIEW;//world * view
float4x4 gWVP:WORLDVIEWPROJ;//world * view * projection

Texture2D diffTex:DIFFUSETEX;//diffuse color + alpha (alpha-tested in PS)
Texture2D specTex:SPECULARTEX;//not used
Texture2D glowTex:GLOWTEX;//not used
Texture2D norTex: NORTEX;//not used

//some state
// NOTE(review): the name says "noBlend" but BlendEnable is TRUE — this state
// actually ENABLES blending, and it is the one bound by the DEFFERD_RENDER
// technique below. Likely a naming (or value) mistake; confirm the intent.
BlendState noBlend
{
    BlendEnable[0] = TRUE;
};

// Blending genuinely disabled (currently unreferenced in this file).
BlendState NoBlend
{
    BlendEnable[0] = FALSE;
};

// Standard depth test + write for the opaque G-buffer fill.
DepthStencilState dsState
{
   DepthFunc = LESS_EQUAL;
   DepthEnable = TRUE;
   DepthWriteMask =1;// 1 == D3D10_DEPTH_WRITE_MASK_ALL
   
};

// Solid fill; CullMode = 3 is the numeric value of D3D10_CULL_BACK.
RasterizerState rState
{
 FillMode = Solid;
 CullMode = 3;
};


// Trilinear sampler with wrap addressing, used for all material textures.
SamplerState defaultSam
{
    Filter = MIN_MAG_MIP_LINEAR;
    AddressU = Wrap;
    AddressV = Wrap;
};

// Vertex input for static meshes.
struct VS_IN
{
   float4 posL: POSITION;
   float3 nor: NORMAL;
   float2 texCoor:TEXCOORD;
};
// Interpolants passed to the G-buffer pixel shader.
struct PS_IN
{
   float4 pos:SV_POSITION;
   float3 nor:NORMAL;// view-space normal (normalized per-vertex in the VS)
   float2 texCoor:TEXCOORD0;
   float4 posView:TEXCOORD1;// view-space position
};

// One float4 per G-buffer render target.
struct PS_OUTPUT
{
	float4 G0 : SV_Target0;//xyz=diffuse, w=alpha
	float4 G1 : SV_Target1;//xyz=norInView,w=glow
	float4 G2:  SV_Target2;//xyz=posInView , w=materail ID
};

// G-buffer vertex shader: outputs the clip-space position plus the view-space
// position and normal that the pixel shader writes into the G buffer.
PS_IN VS(VS_IN vIn)
{
  PS_IN vOut;
  vOut.pos     = mul(vIn.posL, gWVP);
  vOut.posView = mul(vIn.posL, gWV);
  // When rendering terrain, "mul(vIn.nor,(float3x3)gV)" is used instead of gWV,
  // because the terrain's height comes from vertex-texture displacement and
  // using gV avoids the normal being scaled.
  vOut.nor     = normalize(mul(vIn.nor, (float3x3)gWV));
  vOut.texCoor = vIn.texCoor;
  return vOut;
}
// G-buffer pixel shader: writes diffuse color, view-space normal and view-space
// position; texels with alpha < 0.5 are discarded (alpha test).
PS_OUTPUT PS(PS_IN pIn)
{
   PS_OUTPUT pOut =(PS_OUTPUT) 0.0;
   float4 colorSuface=diffTex.Sample(defaultSam,pIn.texCoor);
   // Alpha test: clip discards when its argument is negative, i.e. alpha < 0.5.
   clip(colorSuface.w - 0.5f);
   pOut.G0=colorSuface;
   // BUGFIX: renormalize the interpolated normal — linear interpolation across
   // the triangle denormalizes it, and the SSAO pass assumes unit normals.
   pOut.G1=float4(normalize(pIn.nor),0);// w = glow (unused)
   pOut.G2=pIn.posView;
   pOut.G2.w=0;// w = material ID
   return pOut;
}

// Deferred G-buffer fill technique for static meshes.
technique10 DEFFERD_RENDER
{
  pass P0
 {
    SetVertexShader( CompileShader( vs_4_0, VS() ) );
    SetGeometryShader( NULL );
    SetPixelShader( CompileShader( ps_4_0, PS() ) );

    SetDepthStencilState(dsState, 0);
    SetRasterizerState(rState);
    // NOTE(review): 'noBlend' is declared with BlendEnable[0] = TRUE, so
    // blending is actually ON for this pass; 'NoBlend' (FALSE) may have been
    // the intended state — confirm.
    SetBlendState(noBlend, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
  }
}

In the stage of rendering to G, fx was used.

In the stage of "G to backBuffer" ,I'm not using fx but the individual vertexShader and pixelShader.

 

In the 2nd stage, I found that when I use "mul(gV_Invert,float4(posInView,1.0))", the vertex can't be transformed to world space... could this be causing the SSAO failure?


Edited by LPVOID, 08 September 2013 - 09:36 AM.


#4 LPVOID   Members   -  Reputation: 106

Like
0Likes
Like

Posted 08 September 2013 - 09:44 AM

for more information

I pasted Terrain.fx below

/////////////////////////// Textures /////////////////////////////////////////////////
// One detail texture per terrain surface type (see the material table below).
Texture2D  txDetail_Water:DETAIL_WATER;
Texture2D  txDetail_Stone:DETAIL_STONE;
Texture2D  txDetail_Sand:DETAIL_SAND;
Texture2D  txDetail_Grass:DETAIL_GRASS;
Texture2D  txDetail_Bush:DETAIL_BUSH;
Texture2D  txDetail_Tree:DETAIL_TREE;
Texture2D  txDetail_Rock:DETAIL_ROCK;
Texture2D  txDetail_City:DETAIL_CITY;
Texture2D  txDetail_Snow:DETAIL_SNOW;

Texture2D txSuface:SURFACETEX;//surface-type map: .r selects the material (table below)
Texture2D txDiff:DIFFUSETEX;//low-frequency diffuse map used at a distance
Texture2D txHeight:HEIGHTTEX;//heightmap sampled in the vertex shaders

Texture2D WaterNormal:TEX_WATER_N;//scrolling wave normal map for water surfaces

//water normal: transform from TBN (tangent) space to model space
static  float3x3 M33_TBN_To_M = { 1.0f,0.0f,0.0f,0.0f,0.0f,-1.0f,0.0f,1.0f,0.0f };
//terrain surface materials: name, index, surface value, byte value
//water  0   0.000   0
//stone  1   0.125   32
//sand   2   0.250   64
//Grass  3   0.375   96
//bush   4   0.500   128
//Tree   5   0.625   160
//Rock   6   0.750   192
//City   7   0.875   224
//snow   8   1.000   255
//near/far distance range for blending the detail textures with the distant diffuse map
static float2 detailMinMax=float2(200.0f,1000.0f);

// Heightmap sampler (anisotropic, mirror addressing).
// NOTE(review): the "Texture = (txHeight)" member is legacy effect syntax kept
// for the tex2Dlod calls below; D3D10-style Sample calls bind the texture
// explicitly anyway.
SamplerState SamHeight
{
    Texture = (txHeight);
    Filter = ANISOTROPIC;
    //Filter = MIN_MAG_MIP_POINT;
    AddressU = Mirror;
    AddressV = Mirror;
    MaxAnisotropy=4;
};
// General material sampler (anisotropic, wrap addressing).
SamplerState SamTexture
{
    Filter = ANISOTROPIC;
    AddressU = Wrap;
    AddressV = Wrap;
    MaxAnisotropy=8;
};
// Point sampler (not referenced in this file).
SamplerState PointSample
{
    Filter = MIN_MAG_MIP_POINT;
    AddressU = Wrap;
    AddressV = Wrap;
};



/////////////////////////// Parameters //////////////////////////////////////////////////
// Per-frame / per-object constants supplied by the application.
// (The name "transformDate" is presumably a typo for "transformData"; renaming
// would also touch the application side, so it is left as-is.)
cbuffer transformDate
{
	float4x4 m44_V:VIEW;//view matrix
	float4x4 m44_WV_NOSCALE:WV_NOSCALE;//presumably world*view without scale — confirm
	float4x4 m44_WV:WORLDVIEW;
	float4x4 m44_WVP:WORLDVIEWPROJ;
	float3    v3_CamPos:CAMPOS;
	float4	 exPP:EXTEND_PP;//DetailRepeat, HeightMapWidth, areaWidth, segs
	float4	exPP_1:EXTEND_PP_1;//scaleFnK, scaleFnB, fTime, mMTimeLoop (Mtime cycles over 0..2Pi)
//terrain height = tex.r * scaleFnK + scaleFnB;
	float4x4 m44_WVP_L:WORLDVIEWPROJ_L;//world*view*proj for the light (depth) pass
	float2 WaveUVOffset0:W_UV_0;//scrolling wave UV offsets
	float2 WaveUVOffset1:W_UV_1;
	float   WaveUVRepeat:W_UV_REPEAT;//wave texture tiling factor
}

// Vertex input: grid position, normal (unused here — the normal is rebuilt
// from the heightmap in VS_Main), and heightmap UV.
struct VS_IN
{
   float4 posL: POSITION;
   float3 nor: NORMAL;
   float2 texCoor:TEXCOORD;
};

// Main-pass interpolants: clip pos, view-space normal, UV, view-space position.
struct PS_IN_MAIN
{
   float4 pos:SV_POSITION;
   float3 norView:NORMAL;
   float2 texCoor:TEXCOORD0;
   float4 posView:TEXCOORD1;
};

// Main-pass G-buffer outputs.
struct PS_OUT_MAIN
{
	float4 G0 :		SV_Target0;//texture (diffuse color)
	float4 G1 :		SV_Target1;//nor (view-space normal)
	float4 G2:		SV_Target2;//pos (view-space position; w = material ID)
};
// Lighting (depth-only) pass input: position only.
struct PS_IN_L
{
	float4 pos:SV_POSITION;
};

// Reconstruct the model-space position of a terrain vertex from its heightmap UV.
// exPP.z = terrain area width; exPP_1.x / exPP_1.y = height scale (K) and bias (B).
// NOTE(review): tex2Dlod is the legacy D3D9 intrinsic; under fx_4_0 this relies
// on compatibility mode — SampleLevel is the D3D10 form. Confirm the compile
// flags before changing it.
float3 GetModelPos(in float2 UV)
{
	float x=(UV.x-0.5)*exPP.z;
	float z=(0.5-UV.y)*exPP.z;
	float y=tex2Dlod(SamHeight, float4(UV, 0,0)).r*exPP_1.x+exPP_1.y;
	return float3(x,y,z);
}
///////// Lighting VS //////////
// Lighting (depth) pass vertex shader: displaces the grid vertex by the
// heightmap and transforms it with the light's WVP matrix.
PS_IN_L VS_Light(VS_IN vIn)
{
    PS_IN_L vOut=(PS_IN_L) 0;
    float4 tranformedPos=vIn.posL;
    // terrain height = heightmap.r * scaleFnK + scaleFnB (exPP_1.x / exPP_1.y)
    tranformedPos.y=tex2Dlod(SamHeight, float4(vIn.texCoor, 0,0)).r*exPP_1.x+exPP_1.y;
   vOut.pos=mul(tranformedPos,m44_WVP_L);
	return vOut;
}
/////////// Lighting PS ////////////
//depth-only pass: only the depth buffer is written, so there is no output
// Depth-only pixel shader: the pass writes just the depth buffer, so the body
// is intentionally empty.
void PS_Light(PS_IN_L pIn)
{
	
}
///////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////
//9 detail textures: a step of 1/8 = 0.125 yields 9 evenly spaced surface-type values
// NOTE(review): PS_Main's surface blend divides by this same 0.125 step as a
// literal — keep the two in sync if the material count ever changes.
static float surfaceTypeDistance=0.125f;
///////// Main VS ////////////
// Terrain G-buffer vertex shader: displaces the grid vertex by the heightmap
// and rebuilds the normal from the four displaced neighbours.
PS_IN_MAIN VS_Main(VS_IN vIn)
{
	PS_IN_MAIN vOut = (PS_IN_MAIN) 0;
	// UV step between neighbouring vertices of the (exPP.w x exPP.w) grid.
	float UVOffset = 1 / (exPP.w - 1);

	float4 tranformedPos = vIn.posL;
	float3 PosC = GetModelPos(vIn.texCoor);
	tranformedPos.y = PosC.y;

	// Normal: average the cross products of the four adjacent edge pairs.
	float3 PosL    = GetModelPos(vIn.texCoor + float2(-UVOffset, 0));
	float3 PosR    = GetModelPos(vIn.texCoor + float2( UVOffset, 0));
	float3 PosUp   = GetModelPos(vIn.texCoor + float2(0, -UVOffset));
	float3 PosDown = GetModelPos(vIn.texCoor + float2(0,  UVOffset));
	float3 dirUp    = PosUp   - PosC;
	float3 dirRight = PosR    - PosC;
	float3 dirDown  = PosDown - PosC;
	float3 dirLeft  = PosL    - PosC;
	float3 nor = normalize(cross(dirUp,    dirRight))
	           + normalize(cross(dirRight, dirDown))
	           + normalize(cross(dirDown,  dirLeft))
	           + normalize(cross(dirLeft,  dirUp));
	nor = normalize(nor);
	// View-space normal: rotate by the view matrix only, so the heightmap
	// displacement never scales the normal.
	vOut.norView = normalize(mul(nor, (float3x3)m44_V));

	vOut.texCoor = vIn.texCoor;
	vOut.pos     = mul(tranformedPos, m44_WVP);
	vOut.posView = mul(tranformedPos, m44_WV);
	return vOut;
}
//////// Main PS ////////////
// Terrain G-buffer pixel shader: blends the per-material detail textures using
// the surface-type map, fades to the distant diffuse map, and writes color,
// normal and position (+ material ID) to the three G-buffer targets.
PS_OUT_MAIN PS_Main(PS_IN_MAIN pIn)
{
	PS_OUT_MAIN pOut=(PS_OUT_MAIN) 0;
	// View-space normal from the vertex shader; the water branch perturbs it below.
	pOut.G1.xyz=pIn.norView;
	pOut.G1.w=0;//glow (unused)

	// Texture blending: sample every detail layer, then pick/lerp by surface type.
	float detailUVRepeat=10.0f;
	float3 texD_0=txDetail_Water.Sample(	SamTexture,	pIn.texCoor*detailUVRepeat*5).rgb;
	float3 texD_1=txDetail_Stone.Sample(	SamTexture,	pIn.texCoor*detailUVRepeat*5).rgb;
	float3 texD_2=txDetail_Sand.Sample(		SamTexture,	pIn.texCoor*detailUVRepeat*100).rgb;
	float3 texD_3=txDetail_Grass.Sample(	SamTexture,	pIn.texCoor*detailUVRepeat*10).rgb;
	float3 texD_4=txDetail_Bush.Sample(		SamTexture,	pIn.texCoor*detailUVRepeat*10).rgb;
	float3 texD_5=txDetail_Tree.Sample(		SamTexture,	pIn.texCoor*detailUVRepeat).rgb;
	float3 texD_6=txDetail_Rock.Sample(		SamTexture,	pIn.texCoor*detailUVRepeat).rgb;
	float3 texD_7=txDetail_City.Sample(		SamTexture,	pIn.texCoor*detailUVRepeat).rgb;
	float3 texD_8=txDetail_Snow.Sample(		SamTexture,	pIn.texCoor*detailUVRepeat).rgb;

	float surfValue=txSuface.Sample(SamTexture, pIn.texCoor).r;
	// surfValue encodes 9 material types spaced 0.125 apart: the integer part
	// selects the layer pair, the fractional part blends between them.
	float tempV=surfValue/0.125f;
	int intPart=(int)floor(tempV);
	float floatPart=frac(tempV);
	// BUGFIX: detailMix was left uninitialized when intPart fell outside 0..8
	// (possible if surfValue > 1); default to the last layer instead.
	float3 detailMix=texD_8;
	switch(intPart)
	{
	case 0:
		{
			detailMix=lerp(texD_0,texD_1,floatPart);
			// Water: perturb the G-buffer normal with two scrolling wave normal maps.
			float3 wN_0=WaterNormal.Sample(SamTexture,(pIn.texCoor+WaveUVOffset0)*WaveUVRepeat).xyz*2-1;
			float3 wN_1=WaterNormal.Sample(SamTexture,(pIn.texCoor-WaveUVOffset1)*WaveUVRepeat).xyz*2-1;
			float3 wN=normalize(wN_0+wN_1);
			wN=mul(wN,M33_TBN_To_M);//tangent space -> model space
			wN=mul(wN,(float3x3)m44_V);//model space -> view space
			float3 OkN=normalize(pIn.norView*0.7+wN*0.3);
			pOut.G1.xyz=OkN;
		}
		break;
	case 1:
		detailMix=lerp(texD_1,texD_2,floatPart);
		break;
	case 2:
		detailMix=lerp(texD_2,texD_3,floatPart);
		break;
	case 3:
		detailMix=lerp(texD_3,texD_4,floatPart);
		break;
	case 4:
		detailMix=lerp(texD_4,texD_5,floatPart);
		break;
	case 5:
		detailMix=lerp(texD_5,texD_6,floatPart);
		break;
	case 6:
		detailMix=lerp(texD_6,texD_7,floatPart);
		break;
	case 7:
		detailMix=lerp(texD_7,texD_8,floatPart);
		break;
	case 8:
		detailMix=texD_8;
		break;
	default:
		break;
	}
	float3 diffTex=txDiff.Sample(SamTexture,pIn.texCoor).rgb;
	// Fade from the detail blend to the low-frequency diffuse map with view depth.
	float rateOfDiffTex=saturate((pIn.posView.z-detailMinMax.x)/(detailMinMax.y-detailMinMax.x));
	pOut.G0.xyz=lerp(detailMix,diffTex,rateOfDiffTex);
	pOut.G0.w=1.0;//alpha
	pOut.G2=pIn.posView;
	pOut.G2.w=intPart;//material ID
	return pOut;
}
///////////////////////////////////////////////////


// Blending disabled for the opaque terrain G-buffer pass.
BlendState NoBlend
{
    BlendEnable[0] = FALSE;
};

// Depth test + write enabled; 1 == D3D10_DEPTH_WRITE_MASK_ALL.
DepthStencilState dsState
{
   DepthFunc = LESS_EQUAL;
   DepthEnable = TRUE;
   DepthWriteMask =1;
};

//RasterizerState rsSolid
//{
//FillMode = Solid;
//CullMode = 3;
//};

// Main technique: fills the terrain G buffer.
technique10 main
{
  pass P0
  {
       SetVertexShader( CompileShader( vs_4_0, VS_Main() ) );
       SetGeometryShader( NULL );
       SetPixelShader( CompileShader( ps_4_0, PS_Main() ) );
       SetDepthStencilState(dsState, 0);
       // SetRasterizerState(rsSolid);
       // NOTE(review): no rasterizer state is set — the pass inherits whatever
       // the application last bound.
       SetBlendState( NoBlend, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
   } 
}
// Depth-only technique used when rendering the terrain into the light depth map.
// NOTE(review): sets no depth-stencil, rasterizer or blend state, so it
// inherits the currently bound states.
technique10 Lighting
{
  pass P0
  {
       SetVertexShader( CompileShader( vs_4_0, VS_Light() ) );
       SetGeometryShader( NULL );
       SetPixelShader( CompileShader( ps_4_0, PS_Light() ) );
   } 
}

Edited by LPVOID, 08 September 2013 - 09:52 AM.





Old topic!
Guest, the last post of this topic is over 60 days old and at this point you may not reply in this topic. If you wish to continue this conversation start a new topic.



PARTNERS